Nov 21 14:03:28 crc systemd[1]: Starting Kubernetes Kubelet...
Nov 21 14:03:28 crc restorecon[4751]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
system_u:object_r:container_file_t:s0:c0,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 21 14:03:28 crc 
restorecon[4751]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 21 14:03:28 crc 
restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc 
restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc 
restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 14:03:28 
crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 
14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:28 crc restorecon[4751]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:28 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 14:03:29 crc restorecon[4751]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 21 14:03:29 crc restorecon[4751]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Nov 21 14:03:29 crc kubenswrapper[4774]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 21 14:03:29 crc kubenswrapper[4774]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Nov 21 14:03:29 crc kubenswrapper[4774]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 21 14:03:29 crc kubenswrapper[4774]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Nov 21 14:03:29 crc kubenswrapper[4774]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Nov 21 14:03:29 crc kubenswrapper[4774]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.841744 4774 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849204 4774 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849243 4774 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849248 4774 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849254 4774 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849259 4774 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849264 4774 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849269 4774 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849274 4774 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849278 4774 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849282 4774 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849285 4774 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849289 4774 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849293 4774 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849297 4774 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849301 4774 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849305 4774 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849310 4774 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849315 4774 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849319 4774 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849324 4774 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849328 4774 
feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849332 4774 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849337 4774 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849340 4774 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849344 4774 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849348 4774 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849352 4774 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849356 4774 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849360 4774 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849366 4774 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849372 4774 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849377 4774 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849382 4774 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849387 4774 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849394 4774 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849399 4774 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849405 4774 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849410 4774 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849415 4774 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849420 4774 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849427 4774 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849434 4774 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
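
The feature_gate.go warnings in this run mix three cases: names the upstream kubelet does not recognize at all (the OpenShift-specific gates, logged via feature_gate.go:330), known gates that are deprecated (KMSv1, feature_gate.go:351), and known gates that are already GA (ValidatingAdmissionPolicy, DisableKubeletCloudCredentialProviders, feature_gate.go:353), which are still applied but warned about pending removal. A minimal stdlib sketch of that three-way classification; the registry below is a toy stand-in, not the kubelet's real lifecycle table.

    package main

    import "fmt"

    type stage int

    const (
        alpha stage = iota
        deprecated
        ga
    )

    // Toy registry: gate name -> lifecycle stage (illustrative only).
    var known = map[string]stage{
        "KMSv1":                     deprecated,
        "ValidatingAdmissionPolicy": ga,
        "NodeSwap":                  alpha,
    }

    func set(name string, val bool) {
        st, ok := known[name]
        switch {
        case !ok:
            fmt.Printf("W unrecognized feature gate: %s\n", name)
        case st == deprecated:
            fmt.Printf("W Setting deprecated feature gate %s=%v. It will be removed in a future release.\n", name, val)
        case st == ga:
            fmt.Printf("W Setting GA feature gate %s=%v. It will be removed in a future release.\n", name, val)
        }
    }

    func main() {
        set("GatewayAPI", true) // OpenShift-only name: unknown upstream
        set("KMSv1", true)
        set("ValidatingAdmissionPolicy", true)
    }
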
Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849441 4774 feature_gate.go:330] unrecognized feature gate: Example Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849446 4774 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849451 4774 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849456 4774 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849461 4774 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849465 4774 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849470 4774 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849473 4774 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849477 4774 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849480 4774 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849484 4774 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849491 4774 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849496 4774 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849500 4774 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849504 4774 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849508 4774 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849511 4774 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849515 4774 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849518 4774 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849522 4774 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849526 4774 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849530 4774 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849535 4774 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849538 4774 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849541 4774 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849545 4774 
feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849548 4774 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849552 4774 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.849555 4774 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849649 4774 flags.go:64] FLAG: --address="0.0.0.0" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849658 4774 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849671 4774 flags.go:64] FLAG: --anonymous-auth="true" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849677 4774 flags.go:64] FLAG: --application-metrics-count-limit="100" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849683 4774 flags.go:64] FLAG: --authentication-token-webhook="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849687 4774 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849693 4774 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849699 4774 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849704 4774 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849708 4774 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849713 4774 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849718 4774 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849723 4774 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849727 4774 flags.go:64] FLAG: --cgroup-root="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849733 4774 flags.go:64] FLAG: --cgroups-per-qos="true" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849737 4774 flags.go:64] FLAG: --client-ca-file="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849742 4774 flags.go:64] FLAG: --cloud-config="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849747 4774 flags.go:64] FLAG: --cloud-provider="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849751 4774 flags.go:64] FLAG: --cluster-dns="[]" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849759 4774 flags.go:64] FLAG: --cluster-domain="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849763 4774 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849767 4774 flags.go:64] FLAG: --config-dir="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849771 4774 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849776 4774 flags.go:64] FLAG: --container-log-max-files="5" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849781 4774 flags.go:64] FLAG: --container-log-max-size="10Mi" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849786 4774 flags.go:64] FLAG: 
--container-runtime-endpoint="/var/run/crio/crio.sock" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849790 4774 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849795 4774 flags.go:64] FLAG: --containerd-namespace="k8s.io" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849799 4774 flags.go:64] FLAG: --contention-profiling="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849804 4774 flags.go:64] FLAG: --cpu-cfs-quota="true" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849808 4774 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849813 4774 flags.go:64] FLAG: --cpu-manager-policy="none" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849834 4774 flags.go:64] FLAG: --cpu-manager-policy-options="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849841 4774 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849846 4774 flags.go:64] FLAG: --enable-controller-attach-detach="true" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849851 4774 flags.go:64] FLAG: --enable-debugging-handlers="true" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849856 4774 flags.go:64] FLAG: --enable-load-reader="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849861 4774 flags.go:64] FLAG: --enable-server="true" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849865 4774 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849878 4774 flags.go:64] FLAG: --event-burst="100" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849883 4774 flags.go:64] FLAG: --event-qps="50" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849889 4774 flags.go:64] FLAG: --event-storage-age-limit="default=0" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849894 4774 flags.go:64] FLAG: --event-storage-event-limit="default=0" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849899 4774 flags.go:64] FLAG: --eviction-hard="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849905 4774 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849909 4774 flags.go:64] FLAG: --eviction-minimum-reclaim="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849915 4774 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849927 4774 flags.go:64] FLAG: --eviction-soft="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849931 4774 flags.go:64] FLAG: --eviction-soft-grace-period="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849935 4774 flags.go:64] FLAG: --exit-on-lock-contention="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849939 4774 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849943 4774 flags.go:64] FLAG: --experimental-mounter-path="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849948 4774 flags.go:64] FLAG: --fail-cgroupv1="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849952 4774 flags.go:64] FLAG: --fail-swap-on="true" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849956 4774 flags.go:64] FLAG: --feature-gates="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849961 4774 
flags.go:64] FLAG: --file-check-frequency="20s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849965 4774 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849969 4774 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849974 4774 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849978 4774 flags.go:64] FLAG: --healthz-port="10248" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849982 4774 flags.go:64] FLAG: --help="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849986 4774 flags.go:64] FLAG: --hostname-override="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849991 4774 flags.go:64] FLAG: --housekeeping-interval="10s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849995 4774 flags.go:64] FLAG: --http-check-frequency="20s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.849999 4774 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850003 4774 flags.go:64] FLAG: --image-credential-provider-config="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850007 4774 flags.go:64] FLAG: --image-gc-high-threshold="85" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850011 4774 flags.go:64] FLAG: --image-gc-low-threshold="80" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850015 4774 flags.go:64] FLAG: --image-service-endpoint="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850019 4774 flags.go:64] FLAG: --kernel-memcg-notification="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850024 4774 flags.go:64] FLAG: --kube-api-burst="100" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850028 4774 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850033 4774 flags.go:64] FLAG: --kube-api-qps="50" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850037 4774 flags.go:64] FLAG: --kube-reserved="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850042 4774 flags.go:64] FLAG: --kube-reserved-cgroup="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850046 4774 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850051 4774 flags.go:64] FLAG: --kubelet-cgroups="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850056 4774 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850062 4774 flags.go:64] FLAG: --lock-file="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850067 4774 flags.go:64] FLAG: --log-cadvisor-usage="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850072 4774 flags.go:64] FLAG: --log-flush-frequency="5s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850077 4774 flags.go:64] FLAG: --log-json-info-buffer-size="0" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850087 4774 flags.go:64] FLAG: --log-json-split-stream="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850101 4774 flags.go:64] FLAG: --log-text-info-buffer-size="0" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850107 4774 flags.go:64] FLAG: --log-text-split-stream="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850112 4774 flags.go:64] FLAG: 
--logging-format="text" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850117 4774 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850121 4774 flags.go:64] FLAG: --make-iptables-util-chains="true" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850125 4774 flags.go:64] FLAG: --manifest-url="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850129 4774 flags.go:64] FLAG: --manifest-url-header="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850135 4774 flags.go:64] FLAG: --max-housekeeping-interval="15s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850139 4774 flags.go:64] FLAG: --max-open-files="1000000" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850145 4774 flags.go:64] FLAG: --max-pods="110" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850149 4774 flags.go:64] FLAG: --maximum-dead-containers="-1" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850153 4774 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850157 4774 flags.go:64] FLAG: --memory-manager-policy="None" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850162 4774 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850166 4774 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850170 4774 flags.go:64] FLAG: --node-ip="192.168.126.11" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850174 4774 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850186 4774 flags.go:64] FLAG: --node-status-max-images="50" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850190 4774 flags.go:64] FLAG: --node-status-update-frequency="10s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850194 4774 flags.go:64] FLAG: --oom-score-adj="-999" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850199 4774 flags.go:64] FLAG: --pod-cidr="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850203 4774 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850211 4774 flags.go:64] FLAG: --pod-manifest-path="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850216 4774 flags.go:64] FLAG: --pod-max-pids="-1" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850220 4774 flags.go:64] FLAG: --pods-per-core="0" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850225 4774 flags.go:64] FLAG: --port="10250" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850229 4774 flags.go:64] FLAG: --protect-kernel-defaults="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850234 4774 flags.go:64] FLAG: --provider-id="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850239 4774 flags.go:64] FLAG: --qos-reserved="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850243 4774 flags.go:64] FLAG: --read-only-port="10255" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850247 4774 flags.go:64] FLAG: --register-node="true" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850251 4774 flags.go:64] FLAG: 
--register-schedulable="true" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850255 4774 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850263 4774 flags.go:64] FLAG: --registry-burst="10" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850267 4774 flags.go:64] FLAG: --registry-qps="5" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850271 4774 flags.go:64] FLAG: --reserved-cpus="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850282 4774 flags.go:64] FLAG: --reserved-memory="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850287 4774 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850292 4774 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850296 4774 flags.go:64] FLAG: --rotate-certificates="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850300 4774 flags.go:64] FLAG: --rotate-server-certificates="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850304 4774 flags.go:64] FLAG: --runonce="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850308 4774 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850313 4774 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850317 4774 flags.go:64] FLAG: --seccomp-default="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850321 4774 flags.go:64] FLAG: --serialize-image-pulls="true" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850325 4774 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850339 4774 flags.go:64] FLAG: --storage-driver-db="cadvisor" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850344 4774 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850349 4774 flags.go:64] FLAG: --storage-driver-password="root" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850353 4774 flags.go:64] FLAG: --storage-driver-secure="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850357 4774 flags.go:64] FLAG: --storage-driver-table="stats" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850361 4774 flags.go:64] FLAG: --storage-driver-user="root" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850365 4774 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850369 4774 flags.go:64] FLAG: --sync-frequency="1m0s" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850373 4774 flags.go:64] FLAG: --system-cgroups="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850377 4774 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850384 4774 flags.go:64] FLAG: --system-reserved-cgroup="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850388 4774 flags.go:64] FLAG: --tls-cert-file="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850393 4774 flags.go:64] FLAG: --tls-cipher-suites="[]" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850415 4774 flags.go:64] FLAG: --tls-min-version="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850422 4774 flags.go:64] 
FLAG: --tls-private-key-file="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850428 4774 flags.go:64] FLAG: --topology-manager-policy="none" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850433 4774 flags.go:64] FLAG: --topology-manager-policy-options="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850437 4774 flags.go:64] FLAG: --topology-manager-scope="container" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850443 4774 flags.go:64] FLAG: --v="2" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850451 4774 flags.go:64] FLAG: --version="false" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850458 4774 flags.go:64] FLAG: --vmodule="" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850464 4774 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.850469 4774 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850583 4774 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850588 4774 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850598 4774 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850602 4774 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850605 4774 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850610 4774 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850614 4774 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850618 4774 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850621 4774 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850625 4774 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850629 4774 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850633 4774 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850636 4774 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850640 4774 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850644 4774 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850648 4774 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850651 4774 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850656 4774 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
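
Several of the flags.go:64 values in the dump above are comma-separated key=value lists, e.g. --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" and --node-labels. A small stdlib sketch of the parsing such flags require; parseKV is a made-up helper name, and the kubelet's real parsing additionally validates resource names and quantities.

    package main

    import (
        "fmt"
        "strings"
    )

    // parseKV splits a "k=v,k=v" flag value, as used by --system-reserved
    // and --node-labels, into a map. Hypothetical helper; no validation.
    func parseKV(s string) (map[string]string, error) {
        out := map[string]string{}
        for _, pair := range strings.Split(s, ",") {
            k, v, ok := strings.Cut(pair, "=")
            if !ok {
                return nil, fmt.Errorf("malformed pair %q", pair)
            }
            out[strings.TrimSpace(k)] = strings.TrimSpace(v)
        }
        return out, nil
    }

    func main() {
        m, err := parseKV("cpu=200m,ephemeral-storage=350Mi,memory=350Mi")
        if err != nil {
            panic(err)
        }
        fmt.Println(m) // map[cpu:200m ephemeral-storage:350Mi memory:350Mi]
    }
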
Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850661 4774 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850664 4774 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850668 4774 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850672 4774 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850678 4774 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850681 4774 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850685 4774 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850688 4774 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850692 4774 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850695 4774 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850699 4774 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850702 4774 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850707 4774 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850711 4774 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850715 4774 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850718 4774 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850722 4774 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850725 4774 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850729 4774 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850733 4774 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850747 4774 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850750 4774 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850754 4774 feature_gate.go:330] unrecognized feature gate: Example Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850758 4774 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850763 4774 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850767 4774 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850771 4774 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850775 4774 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850778 4774 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850782 4774 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850785 4774 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850789 4774 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850792 4774 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850796 4774 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850800 4774 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850804 4774 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850809 4774 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850813 4774 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850832 4774 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850836 4774 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850841 4774 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850845 4774 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850850 4774 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850853 4774 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850857 4774 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850861 4774 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850865 4774 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850868 4774 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850872 4774 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850875 4774 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850880 4774 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850883 4774 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.850887 4774 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.851864 4774 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.861186 4774 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.861224 4774 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861335 4774 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861356 4774 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861365 4774 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861374 4774 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861382 4774 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861390 4774 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861397 4774 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861404 4774 feature_gate.go:330] unrecognized feature gate: 
ClusterAPIInstallIBMCloud Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861410 4774 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861417 4774 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861423 4774 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861430 4774 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861437 4774 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861442 4774 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861449 4774 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861456 4774 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861463 4774 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861469 4774 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861476 4774 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861482 4774 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861489 4774 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861496 4774 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861502 4774 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861509 4774 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861515 4774 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861525 4774 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861534 4774 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861542 4774 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861549 4774 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861560 4774 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
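
Each time the gates are re-parsed, feature_gate.go:386 logs the same resolved "feature gates:" map, and the key order is identical across repetitions. That determinism comes for free from Go's fmt, which has printed map keys in sorted order since Go 1.12. A tiny sketch reproducing the output shape with a subset of the gates resolved in this log.

    package main

    import "fmt"

    func main() {
        // Subset of the resolved gates from this log's feature_gate.go:386 lines.
        resolved := map[string]bool{
            "CloudDualStackNodeIPs":     true,
            "KMSv1":                     true,
            "NodeSwap":                  false,
            "ValidatingAdmissionPolicy": true,
        }
        // fmt sorts map keys, so repeated logs of the same map match exactly.
        fmt.Printf("feature gates: %v\n", resolved)
    }
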
Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861571 4774 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861579 4774 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861588 4774 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861595 4774 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861601 4774 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861607 4774 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861612 4774 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861619 4774 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861624 4774 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861629 4774 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861634 4774 feature_gate.go:330] unrecognized feature gate: Example Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861639 4774 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861645 4774 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861650 4774 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861655 4774 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861660 4774 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861665 4774 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861670 4774 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861675 4774 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861680 4774 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861686 4774 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861691 4774 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861696 4774 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861702 4774 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861708 4774 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861713 4774 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 21 
14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861718 4774 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861724 4774 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861729 4774 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861735 4774 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861740 4774 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861745 4774 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861751 4774 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861759 4774 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861765 4774 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861773 4774 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861779 4774 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861786 4774 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861793 4774 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861799 4774 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861805 4774 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.861833 4774 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.861996 4774 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862005 4774 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862012 4774 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862018 4774 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862025 4774 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862030 4774 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 21 14:03:29 crc 
kubenswrapper[4774]: W1121 14:03:29.862036 4774 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862043 4774 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862050 4774 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862056 4774 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862062 4774 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862068 4774 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862073 4774 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862081 4774 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862088 4774 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862094 4774 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862100 4774 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862108 4774 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862114 4774 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862120 4774 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862127 4774 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862133 4774 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862138 4774 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862144 4774 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862149 4774 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862154 4774 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862159 4774 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862165 4774 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862170 4774 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862176 4774 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862182 4774 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862187 4774 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862193 4774 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862198 4774 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862205 4774 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862211 4774 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862218 4774 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862224 4774 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862231 4774 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862236 4774 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862241 4774 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862247 4774 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862259 4774 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862264 4774 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862270 4774 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862275 4774 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 21 14:03:29 crc 
kubenswrapper[4774]: W1121 14:03:29.862283 4774 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862289 4774 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862296 4774 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862303 4774 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862309 4774 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862316 4774 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862322 4774 feature_gate.go:330] unrecognized feature gate: Example Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862329 4774 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862336 4774 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862342 4774 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862349 4774 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862355 4774 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862362 4774 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862368 4774 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862374 4774 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862379 4774 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862384 4774 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862390 4774 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862399 4774 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862416 4774 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862424 4774 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862431 4774 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862438 4774 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862446 4774 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 21 14:03:29 crc kubenswrapper[4774]: W1121 14:03:29.862452 4774 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.862462 4774 
feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.863777 4774 server.go:940] "Client rotation is on, will bootstrap in background" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.869150 4774 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.869264 4774 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.871915 4774 server.go:997] "Starting client certificate rotation" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.871954 4774 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.873608 4774 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-24 00:03:05.22888499 +0000 UTC Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.873681 4774 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 777h59m35.355207291s for next certificate rotation Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.906592 4774 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.912833 4774 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.929007 4774 log.go:25] "Validated CRI v1 runtime API" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.971978 4774 log.go:25] "Validated CRI v1 image API" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.973832 4774 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.979891 4774 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-11-21-13-59-02-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.979922 4774 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.997856 4774 manager.go:217] Machine: {Timestamp:2025-11-21 14:03:29.995280413 +0000 UTC m=+0.647479682 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 
Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.997856 4774 manager.go:217] Machine: {Timestamp:2025-11-21 14:03:29.995280413 +0000 UTC m=+0.647479682 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3 BootID:c1f65383-a207-4db6-9ed6-aa2f40413778 Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:32:0a:60 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:32:0a:60 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:6b:b1:f5 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:60:ed:81 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:bd:c1:1b Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:2a:6e:08 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:a7:7e:9e Speed:-1 Mtu:1496} {Name:eth10 MacAddress:32:53:f4:a4:c3:27 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:d6:22:46:19:a6:43 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.998321 4774 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.998536 4774 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.999448 4774 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.999662 4774 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Nov 21 14:03:29 crc kubenswrapper[4774]: I1121 14:03:29.999743 4774 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.000015 4774 topology_manager.go:138] "Creating topology manager with none policy"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.000070 4774 container_manager_linux.go:303] "Creating device plugin manager"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.000642 4774 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.000730 4774 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.000975 4774 state_mem.go:36] "Initialized new in-memory state store"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.001120 4774 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.004854 4774 kubelet.go:418] "Attempting to sync node with API server"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.004932 4774 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.005005 4774 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.005100 4774 kubelet.go:324] "Adding apiserver pod source"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.005189 4774 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.010667 4774 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.014955 4774 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
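The nodeConfig dump above also fixes the node's allocatable resources: MemoryCapacity from the Machine entry, minus SystemReserved (350Mi of memory here; KubeReserved is null), minus the memory.available hard-eviction threshold (100Mi), is what the kubelet will advertise as allocatable memory. A minimal sketch of that standard node-allocatable formula with the values from this log:

    package main

    import "fmt"

    const Mi = int64(1024 * 1024)

    func main() {
        capacity := int64(33654124544) // MemoryCapacity from the Machine entry
        systemReserved := 350 * Mi     // SystemReserved memory in nodeConfig
        kubeReserved := int64(0)       // KubeReserved is null in nodeConfig
        evictionHard := 100 * Mi       // HardEvictionThresholds memory.available

        // Allocatable = Capacity - KubeReserved - SystemReserved - EvictionHard
        allocatable := capacity - kubeReserved - systemReserved - evictionHard
        fmt.Printf("allocatable memory: %d bytes (~%.1f GiB)\n",
            allocatable, float64(allocatable)/float64(1024*Mi))
    }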
Nov 21 14:03:30 crc kubenswrapper[4774]: W1121 14:03:30.015539 4774 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:30 crc kubenswrapper[4774]: W1121 14:03:30.015565 4774 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.015811 4774 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError"
Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.015948 4774 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.017725 4774 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.019483 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.019508 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.019523 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.019532 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.019547 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.019556 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.019565 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.019579 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.019591 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.019608 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.019620 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.019629 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.020349 4774 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
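Every reflector warning above is the same underlying symptom: nothing is accepting connections on api-int.crc.testing:6443 yet (the kube-apiserver static pod has not come up), so each list/watch fails with connection refused and client-go keeps retrying with backoff. The check is easy to reproduce outside the kubelet with a plain TCP dial; a minimal sketch:

    package main

    import (
        "fmt"
        "net"
        "time"
    )

    func main() {
        // The same endpoint the reflectors are failing against.
        conn, err := net.DialTimeout("tcp", "api-int.crc.testing:6443", 3*time.Second)
        if err != nil {
            fmt.Println("dial failed:", err) // "connection refused" while the apiserver is down
            return
        }
        conn.Close()
        fmt.Println("apiserver endpoint is accepting connections")
    }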
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.020799 4774 server.go:1280] "Started kubelet"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.020995 4774 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.021375 4774 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.022593 4774 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.022623 4774 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Nov 21 14:03:30 crc systemd[1]: Started Kubernetes Kubelet.
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.024480 4774 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.024533 4774 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.024710 4774 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 12:35:32.424837163 +0000 UTC
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.024756 4774 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 190h32m2.400083749s for next certificate rotation
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.024873 4774 volume_manager.go:287] "The desired_state_of_world populator starts"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.024893 4774 volume_manager.go:289] "Starting Kubelet Volume Manager"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.025002 4774 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.025003 4774 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Nov 21 14:03:30 crc kubenswrapper[4774]: W1121 14:03:30.025554 4774 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.025671 4774 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.031175 4774 factory.go:55] Registering systemd factory
Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.031951 4774 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="200ms"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.040037 4774 server.go:460] "Adding debug handlers to kubelet server"
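The podresources entries above ("Setting rate limiting for endpoint" and "Starting to serve the podresources API") describe the kubelet's pod-resources gRPC service on a local unix socket. A minimal client sketch against that socket, using the published k8s.io/kubelet podresources v1 API (compatible module versions and root access to the socket are assumptions here):

    package main

    import (
        "context"
        "fmt"
        "time"

        "google.golang.org/grpc"
        "google.golang.org/grpc/credentials/insecure"
        podresourcesapi "k8s.io/kubelet/pkg/apis/podresources/v1"
    )

    func main() {
        // The endpoint logged by server.go:236 above.
        conn, err := grpc.Dial("unix:///var/lib/kubelet/pod-resources/kubelet.sock",
            grpc.WithTransportCredentials(insecure.NewCredentials()))
        if err != nil {
            panic(err)
        }
        defer conn.Close()

        ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
        defer cancel()
        client := podresourcesapi.NewPodResourcesListerClient(conn)
        resp, err := client.List(ctx, &podresourcesapi.ListPodResourcesRequest{})
        if err != nil {
            panic(err)
        }
        for _, p := range resp.GetPodResources() {
            fmt.Printf("%s/%s\n", p.GetNamespace(), p.GetName())
        }
    }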
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.034588 4774 factory.go:221] Registration of the systemd container factory successfully
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.040564 4774 factory.go:153] Registering CRI-O factory
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.040604 4774 factory.go:221] Registration of the crio container factory successfully
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.040737 4774 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.040793 4774 factory.go:103] Registering Raw factory
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.040860 4774 manager.go:1196] Started watching for new ooms in manager
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.042172 4774 manager.go:319] Starting recovery of all containers
Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.040617 4774 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.181:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187a0a8e38a2416d default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-21 14:03:30.020761965 +0000 UTC m=+0.672961244,LastTimestamp:2025-11-21 14:03:30.020761965 +0000 UTC m=+0.672961244,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.044921 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.044970 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.044983 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.044993 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045003 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045012 4774 reconstruct.go:130] "Volume
is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045023 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045034 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045044 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045053 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045062 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045072 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045081 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045092 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045100 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045110 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045118 4774 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045127 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045136 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045145 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045178 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045187 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045196 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045221 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045230 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045239 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045252 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045261 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045271 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045280 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045289 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045299 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045309 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045319 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045328 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045338 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045347 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045357 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045367 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045378 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045397 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045414 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045427 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045440 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045452 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045464 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045475 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045486 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045497 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045509 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" 
volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045519 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045529 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045544 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045556 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045580 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045600 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045610 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045623 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045635 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045646 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045657 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045670 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045713 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045727 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045746 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045763 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045777 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045790 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045835 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045848 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045861 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045872 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045884 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045896 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045907 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045919 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045934 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045949 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045962 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045974 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.045989 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046002 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046013 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046023 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046036 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046046 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046056 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046067 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046079 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046092 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046105 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046117 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046129 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046142 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046153 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046167 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046179 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046190 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046202 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046214 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046228 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046242 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046255 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046271 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046285 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046297 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046307 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046319 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046330 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046340 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046350 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046361 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046372 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046388 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046410 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046421 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046431 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046441 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046450 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046461 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046471 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046480 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046490 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046501 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046511 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046520 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046529 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046537 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046548 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046560 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046569 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046579 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046588 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046599 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046607 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046617 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046626 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046635 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046645 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.046656 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048037 4774 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048078 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048100 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048120 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048137 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048155 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048172 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048192 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048210 4774 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048227 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048242 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048260 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048276 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048295 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048311 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048344 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048364 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048380 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048396 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048417 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048433 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048450 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048465 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048478 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048490 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048503 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048516 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048538 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048556 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048569 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048582 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048596 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048609 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048622 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048637 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048651 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048665 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048678 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048691 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048703 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048716 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048729 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048741 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048754 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048768 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048780 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048793 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048805 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048842 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048859 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048910 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048926 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048938 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048950 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048962 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048974 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048985 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.048997 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.049011 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.049024 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.049036 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.049048 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.049060 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.049075 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.049088 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.049099 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.049113 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.049125 4774 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.049138 4774 reconstruct.go:97] "Volume reconstruction finished" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.049148 4774 reconciler.go:26] "Reconciler: start to sync state" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.066983 4774 manager.go:324] Recovery completed Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.082281 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.084712 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.084758 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.084774 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.085659 4774 cpu_manager.go:225] "Starting CPU manager" policy="none" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.085687 4774 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.085728 4774 state_mem.go:36] "Initialized new in-memory state store" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.088127 4774 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.090491 4774 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.091790 4774 status_manager.go:217] "Starting to sync pod status with apiserver" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.091841 4774 kubelet.go:2335] "Starting kubelet main sync loop" Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.091967 4774 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Nov 21 14:03:30 crc kubenswrapper[4774]: W1121 14:03:30.094645 4774 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.094759 4774 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.098495 4774 policy_none.go:49] "None policy: Start" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.099264 4774 memory_manager.go:170] "Starting memorymanager" policy="None" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.099326 4774 state_mem.go:35] "Initializing new in-memory state store" Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.126185 4774 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.163354 4774 manager.go:334] "Starting Device Plugin manager" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.163652 4774 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.163679 4774 server.go:79] "Starting device plugin registration server" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.164193 4774 eviction_manager.go:189] "Eviction manager: starting control loop" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.164220 4774 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.164348 4774 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.164451 4774 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.164458 4774 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.172813 4774 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.192433 4774 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Nov 21 14:03:30 crc kubenswrapper[4774]: 
I1121 14:03:30.192574 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.193708 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.193744 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.193756 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.193922 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.194225 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.194310 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.195021 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.195061 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.195094 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.195307 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.195379 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.195404 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.195607 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.195633 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.195649 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.196450 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.196475 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.196486 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.196498 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.196512 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.196500 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.196669 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.196867 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.196962 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.197295 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.197324 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.197335 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.197467 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.197566 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.197607 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.198142 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.198184 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.198235 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.198578 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.198614 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.198629 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.198645 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.198662 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.198671 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.198919 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.198976 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.199775 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.199808 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.199864 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.233737 4774 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="400ms" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.251994 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252045 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252075 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252202 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252294 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252327 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252347 
4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252373 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252406 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252429 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252472 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252516 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252562 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252581 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.252599 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.264423 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.266133 4774 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.266174 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.266189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.266908 4774 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.268801 4774 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354018 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354098 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354126 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354150 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354195 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354218 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354231 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354262 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354253 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354295 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354289 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354343 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354341 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354369 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354366 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354394 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354407 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 21 14:03:30 crc 
kubenswrapper[4774]: I1121 14:03:30.354390 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354439 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354434 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354431 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354562 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354455 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354615 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354656 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354611 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354666 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354689 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354725 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.354867 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.469994 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.471387 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.471423 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.471432 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.471459 4774 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.471933 4774 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.531802 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.537790 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.562264 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.571406 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: W1121 14:03:30.583686 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-e470bd0b7e233b5c62b809e2315fa07477be089f94d8594add5070089befdb1f WatchSource:0}: Error finding container e470bd0b7e233b5c62b809e2315fa07477be089f94d8594add5070089befdb1f: Status 404 returned error can't find the container with id e470bd0b7e233b5c62b809e2315fa07477be089f94d8594add5070089befdb1f
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.587935 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: W1121 14:03:30.589256 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-899a0944a030cce8f59f23e53095c525eeae280d7c58f18cf49bd456ef5e2bbe WatchSource:0}: Error finding container 899a0944a030cce8f59f23e53095c525eeae280d7c58f18cf49bd456ef5e2bbe: Status 404 returned error can't find the container with id 899a0944a030cce8f59f23e53095c525eeae280d7c58f18cf49bd456ef5e2bbe
Nov 21 14:03:30 crc kubenswrapper[4774]: W1121 14:03:30.604087 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-7b4543676ec752a224eebcc3060ca0c70e15bb579267fef4f222a105e8b6bbb1 WatchSource:0}: Error finding container 7b4543676ec752a224eebcc3060ca0c70e15bb579267fef4f222a105e8b6bbb1: Status 404 returned error can't find the container with id 7b4543676ec752a224eebcc3060ca0c70e15bb579267fef4f222a105e8b6bbb1
Nov 21 14:03:30 crc kubenswrapper[4774]: W1121 14:03:30.613531 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-409bbe29e7e89f900417ed2f190b06080b9c6c074561c877d31c6df0287ff668 WatchSource:0}: Error finding container 409bbe29e7e89f900417ed2f190b06080b9c6c074561c877d31c6df0287ff668: Status 404 returned error can't find the container with id 409bbe29e7e89f900417ed2f190b06080b9c6c074561c877d31c6df0287ff668
Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.634769 4774 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="800ms"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.872251 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.873494 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.873534 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.873544 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:30 crc kubenswrapper[4774]: I1121 14:03:30.873569 4774 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Nov 21 14:03:30 crc kubenswrapper[4774]: E1121 14:03:30.874129 4774 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc"
Nov 21 14:03:31 crc kubenswrapper[4774]: W1121 14:03:31.013537 4774 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:31 crc kubenswrapper[4774]: E1121 14:03:31.013674 4774 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError"
Nov 21 14:03:31 crc kubenswrapper[4774]: I1121 14:03:31.023786 4774 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:31 crc kubenswrapper[4774]: W1121 14:03:31.073278 4774 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:31 crc kubenswrapper[4774]: E1121 14:03:31.073359 4774 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError"
Nov 21 14:03:31 crc kubenswrapper[4774]: I1121 14:03:31.095406 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"899a0944a030cce8f59f23e53095c525eeae280d7c58f18cf49bd456ef5e2bbe"}
Nov 21 14:03:31 crc kubenswrapper[4774]: I1121 14:03:31.096448 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"409bbe29e7e89f900417ed2f190b06080b9c6c074561c877d31c6df0287ff668"}
Nov 21 14:03:31 crc kubenswrapper[4774]: I1121 14:03:31.097333 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"7b4543676ec752a224eebcc3060ca0c70e15bb579267fef4f222a105e8b6bbb1"}
Nov 21 14:03:31 crc kubenswrapper[4774]: I1121 14:03:31.098274 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"139c20d189dc15c259a7c97655e8b84b2034297a615e78ff66a2aca347d4dc06"}
Nov 21 14:03:31 crc kubenswrapper[4774]: I1121 14:03:31.099129 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e470bd0b7e233b5c62b809e2315fa07477be089f94d8594add5070089befdb1f"}
Nov 21 14:03:31 crc kubenswrapper[4774]: W1121 14:03:31.159393 4774 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:31 crc kubenswrapper[4774]: E1121 14:03:31.159529 4774 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError"
Nov 21 14:03:31 crc kubenswrapper[4774]: E1121 14:03:31.436690 4774 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="1.6s"
Nov 21 14:03:31 crc kubenswrapper[4774]: W1121 14:03:31.651778 4774 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:31 crc kubenswrapper[4774]: E1121 14:03:31.651861 4774 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError"
Nov 21 14:03:31 crc kubenswrapper[4774]: I1121 14:03:31.674467 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:31 crc kubenswrapper[4774]: I1121 14:03:31.675472 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:31 crc kubenswrapper[4774]: I1121 14:03:31.675521 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:31 crc kubenswrapper[4774]: I1121 14:03:31.675534 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:31 crc kubenswrapper[4774]: I1121 14:03:31.675555 4774 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Nov 21 14:03:31 crc kubenswrapper[4774]: E1121 14:03:31.675982 4774 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.023678 4774 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.104355 4774 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070" exitCode=0
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.104439 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070"}
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.104492 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.105990 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.106046 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.106080 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.106650 4774 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b9b3679ff721b506bdc69a59e5e6966b9782d9a318cd229cf058ac720bceb65a" exitCode=0
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.106732 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b9b3679ff721b506bdc69a59e5e6966b9782d9a318cd229cf058ac720bceb65a"}
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.106794 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.108168 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.108201 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.108215 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.109076 4774 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="7de89a56ea1de3ce8a96253dbcd31024728759e58e5a8c2bf0c845497c6f47dd" exitCode=0
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.109142 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.109200 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"7de89a56ea1de3ce8a96253dbcd31024728759e58e5a8c2bf0c845497c6f47dd"}
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.109142 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.110532 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.110557 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.110569 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.110889 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.110925 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.110939 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.111230 4774 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573" exitCode=0
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.111310 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.111331 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573"}
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.112248 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.112276 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.112291 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.114691 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998"}
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.114734 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6"}
Nov 21 14:03:32 crc kubenswrapper[4774]: I1121 14:03:32.114754 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6"}
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.024360 4774 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:33 crc kubenswrapper[4774]: E1121 14:03:33.038022 4774 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.181:6443: connect: connection refused" interval="3.2s"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.118946 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b"}
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.119029 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.120597 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.120662 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.120705 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.121540 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0"}
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.121593 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd"}
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.121608 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f"}
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.122937 4774 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9e2e38d7efe1e4ada3b421dbb47abeb4eb3417e096b2e8914675163d69c3df08" exitCode=0
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.122975 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9e2e38d7efe1e4ada3b421dbb47abeb4eb3417e096b2e8914675163d69c3df08"}
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.123113 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.124220 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.124258 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.124270 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.126078 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.126083 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"53ebc6440be16bb65dec8507dcec929d05acc550d545ab97897cc72e8b78728e"}
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.126991 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.127025 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.127035 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.129333 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1e6a8f53660e5ab6ddadd0ae985773b591a8b466906a611e8e81aca7dfd8b50b"}
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.129379 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.129404 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1bf4070c52d0efcd83b293dd8102c3dc91563a0893f73ef9664ebc999f38a97e"}
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.129419 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"54b142cdc4f369e06858bf91232a83c7333d45855700ce01defff35389225b22"}
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.130086 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.130120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.130130 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:33 crc kubenswrapper[4774]: W1121 14:03:33.191099 4774 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:33 crc kubenswrapper[4774]: E1121 14:03:33.191174 4774 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.276364 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.277673 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.277720 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.277730 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:33 crc kubenswrapper[4774]: I1121 14:03:33.277765 4774 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Nov 21 14:03:33 crc kubenswrapper[4774]: E1121 14:03:33.278494 4774 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.181:6443: connect: connection refused" node="crc"
Nov 21 14:03:33 crc kubenswrapper[4774]: W1121 14:03:33.643520 4774 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:33 crc kubenswrapper[4774]: E1121 14:03:33.643617 4774 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError"
Nov 21 14:03:33 crc kubenswrapper[4774]: W1121 14:03:33.889110 4774 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:33 crc kubenswrapper[4774]: E1121 14:03:33.889221 4774 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError"
Nov 21 14:03:33 crc kubenswrapper[4774]: W1121 14:03:33.948351 4774 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:33 crc kubenswrapper[4774]: E1121 14:03:33.948476 4774 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.181:6443: connect: connection refused" logger="UnhandledError"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.023732 4774 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.181:6443: connect: connection refused
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.109951 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.134117 4774 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="41d08de6615afa8b7559b50734c114546f51cccb2255bad507a58ded880eb424" exitCode=0
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.134177 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"41d08de6615afa8b7559b50734c114546f51cccb2255bad507a58ded880eb424"}
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.134230 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.135243 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.135270 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.135280 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.139380 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d7ff847462546fc640b21534b6f95a839ebaf53c3d7f6c4c473a32de447cc6a2"}
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.139414 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b"}
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.139445 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.139502 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.139537 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.139631 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.140927 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.140949 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.140960 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.141099 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.141117 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.141137 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.141153 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.141161 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.141169 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.141137 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.141282 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.141297 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.166212 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 14:03:34 crc kubenswrapper[4774]: I1121 14:03:34.182152 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.145597 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.146219 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"745749aa8f2d100f5230d5832b51dc3cefe56c1574fc4e9471a6a26fe92d20b6"}
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.146264 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1013e632d33c3fc793f0d300057ba4e6f5d9a0e64918ee4d30ccef681e30ed31"}
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.146283 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d2670cfdaa01722ec3dc500fc37686a0e697f697fcec4bc8fc4e9353b3696ff0"}
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.146387 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.146834 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.146865 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.147329 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.147359 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.147369 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.148039 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.148069 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.148079 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.148740 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.148763 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.148773 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:35 crc kubenswrapper[4774]: I1121 14:03:35.313099 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.153145 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.153982 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.153186 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.153128 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"caa45d7f18c6249861d19436f2485af14fcdb827733dbc7dbdb98237380ce122"}
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.154298 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0771517e6b110b86a46fac953bb40e60415f85d35ad1b3105dd7a6c9168382ae"}
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.155012 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.155046 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.155059 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.155406 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.155437 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.155447 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.478899 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.480268 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.480304 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.480312 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:36 crc kubenswrapper[4774]: I1121 14:03:36.480337 4774 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.156183 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.156246 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.156255 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.157567 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.157619 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.157569 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.157666 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.157690 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.157637 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.406534 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.406689 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.406723 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.407982 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.408017 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.408030 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.492541 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.643785 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.813502 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:37 crc kubenswrapper[4774]: I1121 14:03:37.828135 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:38 crc kubenswrapper[4774]: I1121 14:03:38.158420 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:38 crc kubenswrapper[4774]: I1121 14:03:38.158471 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:38 crc kubenswrapper[4774]: I1121 14:03:38.158471 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:38 crc kubenswrapper[4774]: I1121 14:03:38.159961 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:38 crc kubenswrapper[4774]: I1121 14:03:38.159993 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:38 crc kubenswrapper[4774]: I1121 14:03:38.160004 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:38 crc kubenswrapper[4774]: I1121 14:03:38.160116 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:38 crc kubenswrapper[4774]: I1121 14:03:38.160162 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:38 crc kubenswrapper[4774]: I1121 14:03:38.160178 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:38 crc kubenswrapper[4774]: I1121 14:03:38.160134 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:38 crc kubenswrapper[4774]: I1121 14:03:38.160294 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:38 crc kubenswrapper[4774]: I1121 14:03:38.160322 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:39 crc kubenswrapper[4774]: I1121 14:03:39.161083 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:39 crc kubenswrapper[4774]: I1121 14:03:39.162459 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:39 crc kubenswrapper[4774]: I1121 14:03:39.162489 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:39 crc kubenswrapper[4774]: I1121 14:03:39.162507 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:40 crc kubenswrapper[4774]: E1121 14:03:40.173032 4774 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Nov 21 14:03:42 crc kubenswrapper[4774]: I1121 14:03:42.725837 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 14:03:42 crc kubenswrapper[4774]: I1121 14:03:42.726027 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:42 crc kubenswrapper[4774]: I1121 14:03:42.727578 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:42 crc kubenswrapper[4774]: I1121 14:03:42.727637 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:42 crc kubenswrapper[4774]: I1121 14:03:42.727653 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:42 crc kubenswrapper[4774]: I1121 14:03:42.733614 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 21 14:03:43 crc kubenswrapper[4774]: I1121 14:03:43.189758 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:43 crc kubenswrapper[4774]: I1121 14:03:43.190932 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:43 crc kubenswrapper[4774]: I1121 14:03:43.190971 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:43 crc kubenswrapper[4774]: I1121 14:03:43.190980 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.024707 4774 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.200338 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.205753 4774 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d7ff847462546fc640b21534b6f95a839ebaf53c3d7f6c4c473a32de447cc6a2" exitCode=255
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.205838 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"d7ff847462546fc640b21534b6f95a839ebaf53c3d7f6c4c473a32de447cc6a2"}
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.206050 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.207231 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.207259 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.207270 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.207735 4774 scope.go:117] "RemoveContainer" containerID="d7ff847462546fc640b21534b6f95a839ebaf53c3d7f6c4c473a32de447cc6a2"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.656622 4774 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.657203 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.663356 4774 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.663439 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.725847 4774 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.725910 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.956136 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.956725 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.959440 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.959495 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:45 crc kubenswrapper[4774]: I1121 14:03:45.959509 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:46 crc kubenswrapper[4774]: I1121 14:03:46.054380 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Nov 21 14:03:46 crc kubenswrapper[4774]: I1121 14:03:46.210404 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Nov 21 14:03:46 crc kubenswrapper[4774]: I1121 14:03:46.212910 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03"}
Nov 21 14:03:46 crc kubenswrapper[4774]: I1121 14:03:46.213010 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:46 crc kubenswrapper[4774]: I1121 14:03:46.213147 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:46 crc kubenswrapper[4774]: I1121 14:03:46.214190 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:46 crc kubenswrapper[4774]: I1121 14:03:46.214262 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:46 crc kubenswrapper[4774]: I1121 14:03:46.214278 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:46 crc kubenswrapper[4774]: I1121 14:03:46.214518 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:46 crc kubenswrapper[4774]: I1121 14:03:46.214558 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:46 crc kubenswrapper[4774]: I1121 14:03:46.214572 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:46 crc kubenswrapper[4774]: I1121 14:03:46.230501 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Nov 21 14:03:47 crc kubenswrapper[4774]: I1121 14:03:47.216035 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:47 crc kubenswrapper[4774]: I1121 14:03:47.217564 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:47 crc kubenswrapper[4774]: I1121 14:03:47.217617 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:47 crc kubenswrapper[4774]: I1121 14:03:47.217627 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:47 crc kubenswrapper[4774]: I1121 14:03:47.413374 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:47 crc kubenswrapper[4774]: I1121 14:03:47.413630 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:47 crc kubenswrapper[4774]: I1121 14:03:47.413738 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:47 crc kubenswrapper[4774]: I1121 14:03:47.415261 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:47 crc kubenswrapper[4774]: I1121 14:03:47.415321 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:47 crc kubenswrapper[4774]: I1121 14:03:47.415347 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:47 crc kubenswrapper[4774]: I1121 14:03:47.419587 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:03:48 crc kubenswrapper[4774]: I1121 14:03:48.218391 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:48 crc kubenswrapper[4774]: I1121 14:03:48.219482 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:48 crc kubenswrapper[4774]: I1121 14:03:48.219511 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:48 crc kubenswrapper[4774]: I1121 14:03:48.219521 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:49 crc kubenswrapper[4774]: I1121 14:03:49.220407 4774 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 21 14:03:49 crc kubenswrapper[4774]: I1121 14:03:49.221184 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:49 crc kubenswrapper[4774]: I1121 14:03:49.221214 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:49 crc kubenswrapper[4774]: I1121 14:03:49.221221 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:50 crc kubenswrapper[4774]: E1121 14:03:50.173146 4774 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Nov 21 14:03:50 crc kubenswrapper[4774]: E1121 14:03:50.654467 4774 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.656869 4774 trace.go:236] Trace[577607878]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Nov-2025 14:03:39.267) (total time: 11389ms):
Nov 21 14:03:50 crc kubenswrapper[4774]: Trace[577607878]: ---"Objects listed" error: 11389ms (14:03:50.656)
Nov 21 14:03:50 crc kubenswrapper[4774]: Trace[577607878]: [11.389351475s] [11.389351475s] END
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.656909 4774 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.657912 4774 trace.go:236] Trace[303811412]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Nov-2025 14:03:39.474) (total time: 11183ms):
Nov 21 14:03:50 crc kubenswrapper[4774]: Trace[303811412]: ---"Objects listed" error: 11183ms (14:03:50.657)
Nov 21 14:03:50 crc kubenswrapper[4774]: Trace[303811412]: [11.183271638s] [11.183271638s] END
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.657948 4774 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.658057 4774 trace.go:236] Trace[1828412023]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Nov-2025 14:03:37.369) (total time: 13288ms):
Nov 21 14:03:50 crc kubenswrapper[4774]: Trace[1828412023]: ---"Objects listed" error: 13288ms (14:03:50.657)
Nov 21 14:03:50 crc kubenswrapper[4774]: Trace[1828412023]: [13.288507311s] [13.288507311s] END
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.658082 4774 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.661414 4774 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.662114 4774 trace.go:236] Trace[358731925]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (21-Nov-2025 14:03:39.809) (total time: 10853ms):
Nov 21 14:03:50 crc kubenswrapper[4774]: Trace[358731925]: ---"Objects listed" error: 10852ms (14:03:50.661)
Nov 21 14:03:50 crc kubenswrapper[4774]: Trace[358731925]: [10.853019021s] [10.853019021s] END
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.662140 4774 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.670281 4774 kubelet_node_status.go:115] "Node was previously registered" node="crc"
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.670380 4774 kubelet_node_status.go:79] "Successfully registered node" node="crc"
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.671472 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.671518 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.671530 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.671556 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.671569 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:50Z","lastTransitionTime":"2025-11-21T14:03:50Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"}
Nov 21 14:03:50 crc kubenswrapper[4774]: E1121 14:03:50.694051 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.697976 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.698010 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 
14:03:50.698019 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.698036 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.698046 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:50Z","lastTransitionTime":"2025-11-21T14:03:50Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 21 14:03:50 crc kubenswrapper[4774]: E1121 14:03:50.716412 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status ... for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.725079 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.725143 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 
14:03:50.725158 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.725182 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.725194 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:50Z","lastTransitionTime":"2025-11-21T14:03:50Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 21 14:03:50 crc kubenswrapper[4774]: E1121 14:03:50.735277 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status ... for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.740588 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.740623 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 
14:03:50.740636 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.740659 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.740670 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:50Z","lastTransitionTime":"2025-11-21T14:03:50Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 21 14:03:50 crc kubenswrapper[4774]: E1121 14:03:50.751188 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.755644 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.755691 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 
14:03:50.755701 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.755720 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.755732 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:50Z","lastTransitionTime":"2025-11-21T14:03:50Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 21 14:03:50 crc kubenswrapper[4774]: E1121 14:03:50.766769 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:50Z\\\",\\\"message\\\":\\\"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"si
zeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:50 crc kubenswrapper[4774]: E1121 14:03:50.766902 4774 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.769100 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 
14:03:50.769138 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.769152 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.769172 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.769183 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:50Z","lastTransitionTime":"2025-11-21T14:03:50Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.871976 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.872047 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.872061 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.872093 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.872109 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:50Z","lastTransitionTime":"2025-11-21T14:03:50Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?, CSINode is not yet initialized]"} Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.974540 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.974591 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.974603 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.974627 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:50 crc kubenswrapper[4774]: I1121 14:03:50.974643 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:50Z","lastTransitionTime":"2025-11-21T14:03:50Z","reason":"KubeletNotReady","message":"[container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?, CSINode is not yet initialized]"} Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.019580 4774 apiserver.go:52] "Watching apiserver" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.022680 4774 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.023174 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-gn24l","openshift-machine-config-operator/machine-config-daemon-jtxgb","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"] Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.023555 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.023649 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.023681 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.023706 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.023747 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.024342 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.024540 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.024890 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.024907 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.024948 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-gn24l" Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.025052 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.025078 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.025851 4774 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.028447 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.029284 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.030146 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.030389 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.031502 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.031791 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.032191 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.032395 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.032694 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.032746 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.033182 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.034149 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.034245 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.041738 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.042142 
4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.042342 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063242 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063296 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063330 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063351 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063373 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063395 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063417 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063466 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063487 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: 
\"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063505 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063531 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063546 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063566 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063583 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063603 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063619 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063640 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063682 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063698 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063712 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063727 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063743 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063759 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063791 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063810 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063841 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063857 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063872 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063889 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: 
\"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063903 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063919 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063936 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063952 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063967 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063997 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064012 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064026 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064043 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064058 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064072 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064088 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064105 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064122 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064138 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064155 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064174 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064209 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064226 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064246 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: 
\"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064262 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064278 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064293 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064313 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064336 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064353 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064392 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064410 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064427 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064443 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064459 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064476 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064492 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064508 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064524 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064552 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064569 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064584 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064599 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064621 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064640 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064658 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064675 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064690 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064710 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064726 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064741 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064756 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064773 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064787 4774 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064803 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064834 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064850 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064866 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064882 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064898 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064916 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064931 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064948 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064965 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064981 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064997 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065012 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065028 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065044 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065059 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065074 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065091 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065106 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: 
\"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065121 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065135 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065150 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065164 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065181 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065197 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065217 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065241 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065257 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065274 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065289 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065308 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065330 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065349 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065368 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065384 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065402 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065419 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065435 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065453 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065472 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065493 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065512 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065529 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065616 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065636 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065653 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065669 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065686 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 21 14:03:51 crc 
kubenswrapper[4774]: I1121 14:03:51.065702 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065720 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065737 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065754 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065771 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065786 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065803 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065835 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065851 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065875 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065892 
4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065907 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065933 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065951 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065967 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065983 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066000 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066019 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066040 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066059 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " 
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066077 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066094 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066117 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066134 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066152 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066169 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066185 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066202 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066218 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066234 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: 
\"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066251 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066273 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066290 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066309 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066334 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066357 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066379 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066395 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066412 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066429 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066446 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066467 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066483 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066500 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066515 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066531 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066581 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066597 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066614 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066632 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066650 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066668 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066684 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066699 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066718 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066735 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066752 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066776 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066794 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066811 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066848 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066866 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066885 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066905 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066923 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066940 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066958 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066976 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067015 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3eb06dc6-b3cb-44b8-ba08-69bfac3661bd-proxy-tls\") pod \"machine-config-daemon-jtxgb\" (UID: \"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\") " pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc 
kubenswrapper[4774]: I1121 14:03:51.067036 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3eb06dc6-b3cb-44b8-ba08-69bfac3661bd-mcd-auth-proxy-config\") pod \"machine-config-daemon-jtxgb\" (UID: \"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\") " pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067055 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s29s9\" (UniqueName: \"kubernetes.io/projected/3eb06dc6-b3cb-44b8-ba08-69bfac3661bd-kube-api-access-s29s9\") pod \"machine-config-daemon-jtxgb\" (UID: \"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\") " pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067080 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067114 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067135 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9zv9\" (UniqueName: \"kubernetes.io/projected/a7381e8b-9fee-4279-84e7-e3e51eecf0f8-kube-api-access-b9zv9\") pod \"node-resolver-gn24l\" (UID: \"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\") " pod="openshift-dns/node-resolver-gn24l" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067155 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067174 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071368 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071407 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: 
\"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071432 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071498 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071538 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a7381e8b-9fee-4279-84e7-e3e51eecf0f8-hosts-file\") pod \"node-resolver-gn24l\" (UID: \"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\") " pod="openshift-dns/node-resolver-gn24l" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071574 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071776 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071812 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071856 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071876 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071894 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071914 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3eb06dc6-b3cb-44b8-ba08-69bfac3661bd-rootfs\") pod \"machine-config-daemon-jtxgb\" (UID: \"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\") " pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063634 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063776 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.063947 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064708 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.064873 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.074172 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065205 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.065714 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066003 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066067 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066205 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066350 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066404 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066497 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066793 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.066845 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067023 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067287 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067318 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067498 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067468 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067507 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067528 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067716 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067861 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.067936 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.068179 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.068209 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.074294 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.068491 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.068571 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.068750 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.068766 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.068856 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.068907 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.068994 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.069033 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.069300 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.069350 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.069405 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.069469 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.069546 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.069546 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.069737 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.069932 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.069952 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.069915 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.070004 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.070019 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.070218 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.070245 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.070510 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.070530 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.070663 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.070732 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.070775 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.070796 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.070944 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071156 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071176 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.071627 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.072370 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.072884 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.073057 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.073168 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.073251 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.073398 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.073419 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.073429 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.073770 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.073858 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.074095 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.074200 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.074503 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.074502 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.074523 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.077247 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.078943 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.079131 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.079361 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.080470 4774 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.081968 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.082296 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.082672 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.083017 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.084076 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.084107 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.084119 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.084142 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.084156 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:51Z","lastTransitionTime":"2025-11-21T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.084922 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.084967 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.084983 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.085287 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.085354 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.085667 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.085667 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.082953 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.085953 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.086314 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.086664 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.087206 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.087388 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.087481 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.087592 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.087904 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.088263 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.088366 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.089266 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.092890 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.094379 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.094654 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.094811 4774 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.094910 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:51.594887838 +0000 UTC m=+22.247087287 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.094979 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:03:51.594932399 +0000 UTC m=+22.247131658 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.095154 4774 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.095213 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:51.595202066 +0000 UTC m=+22.247401525 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.095332 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.095593 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.095750 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.095919 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.096070 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.096501 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.096694 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.096692 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.096993 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.097471 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.097855 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.098008 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.098169 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.098115 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.098230 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.098318 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.098451 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.098661 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.098667 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.100670 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.101039 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.101042 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.102053 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.102180 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.102201 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.102214 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.102419 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.102565 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.102585 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.102604 4774 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.102677 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:51.602658686 +0000 UTC m=+22.254858145 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.102715 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.102753 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.102885 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.102925 4774 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.103131 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:51.603084728 +0000 UTC m=+22.255283987 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.103015 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.103665 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.103681 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.105119 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.105487 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.105679 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.105736 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.106603 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.107480 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.109638 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.109908 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.110232 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.110992 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.111672 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.111969 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.112011 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.112881 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.113421 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.114248 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.114393 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.114415 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.115296 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.115347 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.115319 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.115494 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.116473 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.116614 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.118309 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.118788 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.120943 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.121171 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.124128 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.124164 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.125253 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.125337 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.125388 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.125733 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.126088 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.126360 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.126412 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.126572 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.128046 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.128618 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.128780 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.128967 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.129139 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.129801 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.132898 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.133317 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.133377 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.128021 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.136630 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.136758 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.137512 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.137692 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.137960 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.138059 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.139994 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.142374 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.142399 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.142590 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.142693 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.143387 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.142763 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.143514 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.145190 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.145413 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.146353 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.149544 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.153608 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.161692 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.164630 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.172794 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9zv9\" (UniqueName: \"kubernetes.io/projected/a7381e8b-9fee-4279-84e7-e3e51eecf0f8-kube-api-access-b9zv9\") pod \"node-resolver-gn24l\" (UID: \"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\") " pod="openshift-dns/node-resolver-gn24l" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.172863 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.172884 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.172932 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a7381e8b-9fee-4279-84e7-e3e51eecf0f8-hosts-file\") pod \"node-resolver-gn24l\" (UID: \"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\") " pod="openshift-dns/node-resolver-gn24l" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173004 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3eb06dc6-b3cb-44b8-ba08-69bfac3661bd-rootfs\") pod \"machine-config-daemon-jtxgb\" (UID: \"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\") " pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173025 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3eb06dc6-b3cb-44b8-ba08-69bfac3661bd-proxy-tls\") pod \"machine-config-daemon-jtxgb\" (UID: \"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\") " pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173043 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3eb06dc6-b3cb-44b8-ba08-69bfac3661bd-mcd-auth-proxy-config\") pod \"machine-config-daemon-jtxgb\" (UID: \"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\") " 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173061 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s29s9\" (UniqueName: \"kubernetes.io/projected/3eb06dc6-b3cb-44b8-ba08-69bfac3661bd-kube-api-access-s29s9\") pod \"machine-config-daemon-jtxgb\" (UID: \"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\") " pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173118 4774 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173128 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173137 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173147 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173156 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173164 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173175 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173184 4774 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173192 4774 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173201 4774 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173209 4774 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 
14:03:51.173218 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173227 4774 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173236 4774 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173245 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173254 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173263 4774 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173273 4774 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173282 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173290 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173301 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173310 4774 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173319 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173327 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173335 4774 
reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173343 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173351 4774 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173360 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173368 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173376 4774 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173384 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173393 4774 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173401 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173410 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173418 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173428 4774 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173441 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173450 4774 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173460 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173469 4774 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173477 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173485 4774 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173494 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173502 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173510 4774 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173518 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173526 4774 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173534 4774 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173542 4774 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173550 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" 
DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173561 4774 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173575 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173587 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173597 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173608 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173620 4774 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173630 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173640 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173651 4774 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173661 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173671 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173681 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173690 4774 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 
14:03:51.173700 4774 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173714 4774 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173726 4774 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173737 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173748 4774 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173759 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173771 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173782 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173794 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173805 4774 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173832 4774 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173844 4774 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173855 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173866 4774 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173878 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173889 4774 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173911 4774 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173921 4774 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173932 4774 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173943 4774 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173955 4774 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173966 4774 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173976 4774 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173987 4774 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.173999 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174009 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174021 4774 reconciler_common.go:293] "Volume detached for volume 
\"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174080 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174093 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174101 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174111 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174121 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174129 4774 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174140 4774 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174148 4774 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174156 4774 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174165 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174174 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174182 4774 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174191 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: 
\"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174200 4774 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174208 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174217 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174225 4774 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174233 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174241 4774 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174249 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174257 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174266 4774 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174274 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174282 4774 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174295 4774 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174303 4774 reconciler_common.go:293] "Volume detached for 
volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174335 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174343 4774 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174352 4774 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174361 4774 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174369 4774 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174378 4774 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174387 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174427 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174437 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174447 4774 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174458 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174486 4774 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174498 4774 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174508 4774 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174518 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174529 4774 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174541 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174557 4774 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174570 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174583 4774 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174596 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174607 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174620 4774 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174631 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174643 4774 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174654 4774 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174666 4774 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174677 4774 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174688 4774 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174698 4774 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174714 4774 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174728 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174737 4774 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174748 4774 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174756 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174764 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174773 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174785 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174793 4774 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174801 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174809 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174846 4774 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174858 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174869 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174880 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174893 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174903 4774 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174913 4774 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174922 4774 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174932 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174940 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174947 4774 reconciler_common.go:293] "Volume 
detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174957 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174965 4774 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174973 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174982 4774 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174990 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174999 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175009 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175021 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175033 4774 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175045 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175061 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175073 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175083 4774 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" 
(UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175095 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175106 4774 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175173 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3eb06dc6-b3cb-44b8-ba08-69bfac3661bd-mcd-auth-proxy-config\") pod \"machine-config-daemon-jtxgb\" (UID: \"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\") " pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175186 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175243 4774 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175255 4774 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175266 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175278 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175287 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175297 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175306 4774 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175316 4774 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" 
DevicePath \"\"" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.174544 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3eb06dc6-b3cb-44b8-ba08-69bfac3661bd-rootfs\") pod \"machine-config-daemon-jtxgb\" (UID: \"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\") " pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175597 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175655 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.175773 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/a7381e8b-9fee-4279-84e7-e3e51eecf0f8-hosts-file\") pod \"node-resolver-gn24l\" (UID: \"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\") " pod="openshift-dns/node-resolver-gn24l" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.176691 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.176946 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.182775 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.185835 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3eb06dc6-b3cb-44b8-ba08-69bfac3661bd-proxy-tls\") pod \"machine-config-daemon-jtxgb\" (UID: \"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\") " pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.189071 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.191325 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.191364 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.191376 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.191396 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.191410 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:51Z","lastTransitionTime":"2025-11-21T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.192415 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s29s9\" (UniqueName: \"kubernetes.io/projected/3eb06dc6-b3cb-44b8-ba08-69bfac3661bd-kube-api-access-s29s9\") pod \"machine-config-daemon-jtxgb\" (UID: \"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\") " pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.194433 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9zv9\" (UniqueName: \"kubernetes.io/projected/a7381e8b-9fee-4279-84e7-e3e51eecf0f8-kube-api-access-b9zv9\") pod \"node-resolver-gn24l\" (UID: \"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\") " pod="openshift-dns/node-resolver-gn24l" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.204099 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.223257 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.233953 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: 
connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.252616 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-hdxzw"] Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.253074 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-q452c"] Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.253232 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.254143 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rltf4"] Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.255034 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.255178 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.255367 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.255611 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-q452c" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.258015 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.258327 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 21 14:03:51 crc kubenswrapper[4774]: W1121 14:03:51.258338 4774 reflector.go:561] object-"openshift-ovn-kubernetes"/"ovnkube-config": failed to list *v1.ConfigMap: configmaps "ovnkube-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.258533 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"ovnkube-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"ovnkube-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.258880 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 21 14:03:51 crc kubenswrapper[4774]: W1121 14:03:51.258908 4774 reflector.go:561] object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-ovn-kubernetes": no relationship found between node 'crc' and this object Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.258943 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-ovn-kubernetes\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" 
in API group \"\" in the namespace \"openshift-ovn-kubernetes\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.259034 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.260220 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.261407 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.261627 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.261662 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.261766 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.261973 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.269192 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.275985 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.276022 4774 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.282703 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.298977 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.299035 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.299051 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.299074 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.299092 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:51Z","lastTransitionTime":"2025-11-21T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.302443 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.323192 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.336215 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.346541 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.357537 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.357630 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: W1121 14:03:51.364166 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-6497365fadf3e03658a4165d2141bf11bae48f28131582d2eebb8862cd47fa22 WatchSource:0}: Error finding container 6497365fadf3e03658a4165d2141bf11bae48f28131582d2eebb8862cd47fa22: Status 404 returned error can't find the container with id 6497365fadf3e03658a4165d2141bf11bae48f28131582d2eebb8862cd47fa22
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.364938 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-gn24l"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.376631 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-multus-conf-dir\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.376690 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkrjk\" (UniqueName: \"kubernetes.io/projected/4057b5ee-926e-4931-b5a0-2c204d18ce72-kube-api-access-fkrjk\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.376714 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-run-netns\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.376732 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-cni-bin\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.376748 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-cni-binary-copy\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.376767 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-etc-kubernetes\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.376897 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-etc-openvswitch\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.376954 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/48241a35-9491-44a3-aeef-5bd2424490a8-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.376989 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-kubelet\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377008 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-systemd-units\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377031 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377056 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovn-node-metrics-cert\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377074 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-multus-cni-dir\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377098 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-multus-daemon-config\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377124 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-systemd\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377144 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovnkube-script-lib\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377219 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-cnibin\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377269 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-var-lib-cni-bin\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377332 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-log-socket\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377369 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-env-overrides\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377395 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwdmg\" (UniqueName: \"kubernetes.io/projected/48241a35-9491-44a3-aeef-5bd2424490a8-kube-api-access-rwdmg\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377423 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-run-k8s-cni-cncf-io\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377459 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-hostroot\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377482 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/48241a35-9491-44a3-aeef-5bd2424490a8-cnibin\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377509 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/48241a35-9491-44a3-aeef-5bd2424490a8-os-release\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377531 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-system-cni-dir\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377554 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-var-lib-kubelet\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377587 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-run-netns\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377613 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkbqf\" (UniqueName: \"kubernetes.io/projected/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-kube-api-access-kkbqf\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377639 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-var-lib-openvswitch\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377666 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-ovn\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377699 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-cni-netd\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377749 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-node-log\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377773 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/48241a35-9491-44a3-aeef-5bd2424490a8-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377797 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-os-release\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377843 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-multus-socket-dir-parent\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377882 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-var-lib-cni-multus\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377920 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-slash\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377944 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-run-ovn-kubernetes\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377969 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/48241a35-9491-44a3-aeef-5bd2424490a8-cni-binary-copy\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.377997 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-run-multus-certs\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.378023 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-openvswitch\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.378046 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovnkube-config\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.378070 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/48241a35-9491-44a3-aeef-5bd2424490a8-system-cni-dir\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.384180 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.397895 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.400152 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.402018 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.402102 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.402119 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.402143 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.402159 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:51Z","lastTransitionTime":"2025-11-21T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.410682 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: W1121 14:03:51.414673 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3eb06dc6_b3cb_44b8_ba08_69bfac3661bd.slice/crio-847462f0dfddfbbbbfccc4b9122230b974187a36745ed29bc3c80dd65f764a79 WatchSource:0}: Error finding container 847462f0dfddfbbbbfccc4b9122230b974187a36745ed29bc3c80dd65f764a79: Status 404 returned error can't find the container with id 847462f0dfddfbbbbfccc4b9122230b974187a36745ed29bc3c80dd65f764a79
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.428957 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.437184 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.441276 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.456520 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: W1121 14:03:51.456684 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-83d4d8e92cc6f32020f1b4f141ea9ed0b8271a7d4aaef8c580dac9a8b4dcaf5e WatchSource:0}: Error finding container 83d4d8e92cc6f32020f1b4f141ea9ed0b8271a7d4aaef8c580dac9a8b4dcaf5e: Status 404 returned error can't find the container with id 83d4d8e92cc6f32020f1b4f141ea9ed0b8271a7d4aaef8c580dac9a8b4dcaf5e
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.471670 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.478809 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-run-netns\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.478878 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/48241a35-9491-44a3-aeef-5bd2424490a8-cnibin\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.478905 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/48241a35-9491-44a3-aeef-5bd2424490a8-os-release\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.478913 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-run-netns\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.478930 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-system-cni-dir\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.478988 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-var-lib-kubelet\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479011 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkbqf\" (UniqueName: \"kubernetes.io/projected/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-kube-api-access-kkbqf\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479026 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-var-lib-openvswitch\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479038 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-system-cni-dir\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479060 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-ovn\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479042 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-ovn\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479086 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-var-lib-kubelet\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479172 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-cni-netd\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479212 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-node-log\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479236 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-slash\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479282 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-run-ovn-kubernetes\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479304 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/48241a35-9491-44a3-aeef-5bd2424490a8-cni-binary-copy\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479426 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-cni-netd\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479437 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/48241a35-9491-44a3-aeef-5bd2424490a8-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479453 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/48241a35-9491-44a3-aeef-5bd2424490a8-cnibin\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479476 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-os-release\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479498 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/48241a35-9491-44a3-aeef-5bd2424490a8-os-release\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479504 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-multus-socket-dir-parent\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479531 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-var-lib-cni-multus\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479566 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-run-ovn-kubernetes\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479562 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-run-multus-certs\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479722 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovnkube-config\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479744 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/48241a35-9491-44a3-aeef-5bd2424490a8-system-cni-dir\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479403 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-var-lib-openvswitch\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479792 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-os-release\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479616 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-run-multus-certs\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479863 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-node-log\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479865 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-var-lib-cni-multus\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479887 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-slash\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479941 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName:
\"kubernetes.io/host-path/48241a35-9491-44a3-aeef-5bd2424490a8-system-cni-dir\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.479744 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-multus-socket-dir-parent\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480086 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/48241a35-9491-44a3-aeef-5bd2424490a8-cni-binary-copy\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480102 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-openvswitch\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480124 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-multus-conf-dir\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480162 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-openvswitch\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480263 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-multus-conf-dir\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480270 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkrjk\" (UniqueName: \"kubernetes.io/projected/4057b5ee-926e-4931-b5a0-2c204d18ce72-kube-api-access-fkrjk\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480291 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-run-netns\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480346 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480430 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-etc-openvswitch\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480448 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-cni-bin\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480552 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-etc-openvswitch\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 
14:03:51.480468 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-cni-binary-copy\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480595 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-cni-bin\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480602 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-etc-kubernetes\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480621 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-kubelet\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480637 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-systemd-units\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480787 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/48241a35-9491-44a3-aeef-5bd2424490a8-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480842 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-etc-kubernetes\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480580 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-run-netns\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480887 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-kubelet\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480926 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480947 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovn-node-metrics-cert\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.480963 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-multus-cni-dir\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.481018 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.481043 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-systemd-units\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.481081 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-multus-daemon-config\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.481089 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-cni-binary-copy\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.481101 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-systemd\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.481120 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovnkube-script-lib\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.481207 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-multus-cni-dir\") pod 
\"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.481239 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-cnibin\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.481259 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-var-lib-cni-bin\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.481287 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-env-overrides\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.482217 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovnkube-script-lib\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.483418 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-systemd\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.483409 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/48241a35-9491-44a3-aeef-5bd2424490a8-tuning-conf-dir\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.483484 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-cnibin\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.483501 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-var-lib-cni-bin\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.483559 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-multus-daemon-config\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.484934 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-rwdmg\" (UniqueName: \"kubernetes.io/projected/48241a35-9491-44a3-aeef-5bd2424490a8-kube-api-access-rwdmg\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.485335 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-run-k8s-cni-cncf-io\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.485354 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-log-socket\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.485414 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-host-run-k8s-cni-cncf-io\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.485493 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-log-socket\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.485546 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-hostroot\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.485625 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-hostroot\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.487865 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/48241a35-9491-44a3-aeef-5bd2424490a8-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.491779 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-env-overrides\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.491922 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovn-node-metrics-cert\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.493341 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb0
85a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\
\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.498834 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkbqf\" (UniqueName: \"kubernetes.io/projected/0bf8b868-6e71-4073-a9ad-e2ac8ae15215-kube-api-access-kkbqf\") pod \"multus-hdxzw\" (UID: \"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\") " pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.500339 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwdmg\" (UniqueName: \"kubernetes.io/projected/48241a35-9491-44a3-aeef-5bd2424490a8-kube-api-access-rwdmg\") pod \"multus-additional-cni-plugins-q452c\" (UID: \"48241a35-9491-44a3-aeef-5bd2424490a8\") " pod="openshift-multus/multus-additional-cni-plugins-q452c" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.504891 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.504926 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.504937 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.504959 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.504972 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:51Z","lastTransitionTime":"2025-11-21T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.505190 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.514831 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.526248 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.538063 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.557366 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.566342 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-hdxzw" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.580765 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-q452c" Nov 21 14:03:51 crc kubenswrapper[4774]: W1121 14:03:51.582290 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0bf8b868_6e71_4073_a9ad_e2ac8ae15215.slice/crio-d24d1da192978f19f072559550634907628dfe03a36e1c526ff35c684a84aa36 WatchSource:0}: Error finding container d24d1da192978f19f072559550634907628dfe03a36e1c526ff35c684a84aa36: Status 404 returned error can't find the container with id d24d1da192978f19f072559550634907628dfe03a36e1c526ff35c684a84aa36 Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.607633 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.607660 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.607673 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.607690 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.607702 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:51Z","lastTransitionTime":"2025-11-21T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.688553 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.688696 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.688737 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.688782 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.688808 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.688937 4774 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.688998 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:52.688981039 +0000 UTC m=+23.341180298 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.689362 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:03:52.689348999 +0000 UTC m=+23.341548258 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.689446 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.689464 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.689476 4774 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.689505 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:52.689496723 +0000 UTC m=+23.341695982 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.689558 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.689572 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.689582 4774 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.689615 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:52.689605866 +0000 UTC m=+23.341805125 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.689747 4774 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: E1121 14:03:51.689782 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:52.689772401 +0000 UTC m=+23.341971650 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.714655 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.714693 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.714702 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.714718 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.714727 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:51Z","lastTransitionTime":"2025-11-21T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.817178 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.817238 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.817250 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.817269 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.817282 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:51Z","lastTransitionTime":"2025-11-21T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.919778 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.919831 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.919842 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.919856 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:51 crc kubenswrapper[4774]: I1121 14:03:51.919866 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:51Z","lastTransitionTime":"2025-11-21T14:03:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.022081 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.022131 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.022141 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.022155 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.022165 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:52Z","lastTransitionTime":"2025-11-21T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.097020 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.097912 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.098848 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.099580 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.100299 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.100894 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.101544 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.102202 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.102907 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.103465 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.104044 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.104753 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.105728 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.106430 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.110877 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.111554 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.114438 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.115068 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.115786 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.117919 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.118701 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.119430 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.120532 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.121351 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.122491 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.123355 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.124791 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.125453 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.126102 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.126140 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.126150 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.126169 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.126181 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:52Z","lastTransitionTime":"2025-11-21T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.126765 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.127545 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.128106 4774 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.128231 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.130583 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.131208 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.132484 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.134457 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.135295 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.136415 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.137308 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.138674 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.139492 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.140772 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.141628 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.142947 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.143466 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.144593 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.145367 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.146726 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.147429 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.148604 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.149267 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.150129 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.151451 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.152096 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.228115 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.228147 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.228157 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.228175 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.228186 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:52Z","lastTransitionTime":"2025-11-21T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.236893 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce"}
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.236954 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16"}
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.236973 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"847462f0dfddfbbbbfccc4b9122230b974187a36745ed29bc3c80dd65f764a79"}
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.239075 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-gn24l" event={"ID":"a7381e8b-9fee-4279-84e7-e3e51eecf0f8","Type":"ContainerStarted","Data":"16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00"}
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.239106 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-gn24l" event={"ID":"a7381e8b-9fee-4279-84e7-e3e51eecf0f8","Type":"ContainerStarted","Data":"33ffa8ee200b8268d5803a64637a16bbb9a0e4e5dbedab36af9a8858854546b3"}
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.240834 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.240862 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"6497365fadf3e03658a4165d2141bf11bae48f28131582d2eebb8862cd47fa22"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.251389 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.252540 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.254005 4774 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03" exitCode=255 Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.254060 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.254118 4774 scope.go:117] "RemoveContainer" containerID="d7ff847462546fc640b21534b6f95a839ebaf53c3d7f6c4c473a32de447cc6a2" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.256300 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hdxzw" event={"ID":"0bf8b868-6e71-4073-a9ad-e2ac8ae15215","Type":"ContainerStarted","Data":"258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.256329 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hdxzw" event={"ID":"0bf8b868-6e71-4073-a9ad-e2ac8ae15215","Type":"ContainerStarted","Data":"d24d1da192978f19f072559550634907628dfe03a36e1c526ff35c684a84aa36"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.256906 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"83d4d8e92cc6f32020f1b4f141ea9ed0b8271a7d4aaef8c580dac9a8b4dcaf5e"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.258799 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.258850 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.258865 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e8ba7cb9ea22ac7bc3ea7fb89ae21cadc143ab99a70115539bb137321adfdadc"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.260403 4774 generic.go:334] "Generic (PLEG): container finished" podID="48241a35-9491-44a3-aeef-5bd2424490a8" containerID="bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07" exitCode=0 Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.260439 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" event={"ID":"48241a35-9491-44a3-aeef-5bd2424490a8","Type":"ContainerDied","Data":"bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.260459 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" event={"ID":"48241a35-9491-44a3-aeef-5bd2424490a8","Type":"ContainerStarted","Data":"df863269c3308b1081b59f5e2325443aad73ae77201514512bb93799be1eec3c"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.261949 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.266982 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.270703 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovnkube-config\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.279418 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.282067 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.282167 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.283186 4774 scope.go:117] "RemoveContainer" containerID="e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03" Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.283464 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.293301 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.296808 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkrjk\" (UniqueName: \"kubernetes.io/projected/4057b5ee-926e-4931-b5a0-2c204d18ce72-kube-api-access-fkrjk\") pod \"ovnkube-node-rltf4\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.310779 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.326392 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.331120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.331169 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.331186 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.331203 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.331213 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:52Z","lastTransitionTime":"2025-11-21T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.342051 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.356348 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.371574 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.385806 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.398969 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.416868 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.435811 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.435890 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 
14:03:52.435906 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.435926 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.435937 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:52Z","lastTransitionTime":"2025-11-21T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.440299 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.457125 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7ff847462546fc640b21534b6f95a839ebaf53c3d7f6c4c473a32de447cc6a2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:45Z\\\",\\\"message\\\":\\\"W1121 14:03:34.301692 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1121 14:03:34.303140 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763733814 cert, and key in /tmp/serving-cert-2163981859/serving-signer.crt, /tmp/serving-cert-2163981859/serving-signer.key\\\\nI1121 14:03:34.775263 1 observer_polling.go:159] Starting file observer\\\\nW1121 14:03:34.778753 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1121 14:03:34.779028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:34.779977 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2163981859/tls.crt::/tmp/serving-cert-2163981859/tls.key\\\\\\\"\\\\nF1121 14:03:45.053450 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating 
requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 
14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.470351 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.474059 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.485694 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.504700 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.521092 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.534168 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.538548 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.538598 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.538615 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.538638 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.538652 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:52Z","lastTransitionTime":"2025-11-21T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.553497 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\"
,\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.572972 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.587512 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.601380 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.617936 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.642537 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.642590 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.642603 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.642622 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.642635 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:52Z","lastTransitionTime":"2025-11-21T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.699131 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699258 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:03:54.69922545 +0000 UTC m=+25.351424709 (durationBeforeRetry 2s).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.699370 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.699401 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.699422 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.699439 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699500 4774 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699536 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699549 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699560 4774 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699548 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" 
failed. No retries permitted until 2025-11-21 14:03:54.699540988 +0000 UTC m=+25.351740247 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699592 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:54.699585089 +0000 UTC m=+25.351784348 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699592 4774 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699643 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699674 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699678 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:54.699659041 +0000 UTC m=+25.351858350 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699687 4774 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:52 crc kubenswrapper[4774]: E1121 14:03:52.699740 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:54.699731493 +0000 UTC m=+25.351930752 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.730616 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.736502 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.741342 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.745108 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.745140 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.745198 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.745221 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.745235 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:52Z","lastTransitionTime":"2025-11-21T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.750204 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.769612 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.783861 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.801117 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.813585 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.824664 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.839738 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.848103 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.848166 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.848180 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.848201 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.848212 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:52Z","lastTransitionTime":"2025-11-21T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.852701 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.871874 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.887299 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7ff847462546fc640b21534b6f95a839ebaf53c3d7f6c4c473a32de447cc6a2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:45Z\\\",\\\"message\\\":\\\"W1121 14:03:34.301692 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1121 14:03:34.303140 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763733814 cert, and key in /tmp/serving-cert-2163981859/serving-signer.crt, /tmp/serving-cert-2163981859/serving-signer.key\\\\nI1121 14:03:34.775263 1 observer_polling.go:159] Starting file observer\\\\nW1121 14:03:34.778753 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1121 14:03:34.779028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:34.779977 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2163981859/tls.crt::/tmp/serving-cert-2163981859/tls.key\\\\\\\"\\\\nF1121 14:03:45.053450 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating 
requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 
14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.899582 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.912873 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.928595 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d7ff847462546fc640b21534b6f95a839ebaf53c3d7f6c4c473a32de447cc6a2\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:45Z\\\",\\\"message\\\":\\\"W1121 14:03:34.301692 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1121 14:03:34.303140 1 crypto.go:601] Generating new CA for check-endpoints-signer@1763733814 cert, and key in /tmp/serving-cert-2163981859/serving-signer.crt, /tmp/serving-cert-2163981859/serving-signer.key\\\\nI1121 14:03:34.775263 1 observer_polling.go:159] Starting file observer\\\\nW1121 14:03:34.778753 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1121 14:03:34.779028 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:34.779977 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2163981859/tls.crt::/tmp/serving-cert-2163981859/tls.key\\\\\\\"\\\\nF1121 14:03:45.053450 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" 
limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired 
or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.943314 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.950180 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.950221 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.950234 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.950251 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.950265 4774 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:52Z","lastTransitionTime":"2025-11-21T14:03:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.955759 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.974200 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:52 crc kubenswrapper[4774]: I1121 14:03:52.990766 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:52Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.004399 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.026335 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.041957 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/open
shift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.053069 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.053108 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.053120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.053139 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.053151 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:53Z","lastTransitionTime":"2025-11-21T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.056654 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.075158 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.092800 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.092835 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.092849 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:03:53 crc kubenswrapper[4774]: E1121 14:03:53.092926 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:03:53 crc kubenswrapper[4774]: E1121 14:03:53.093062 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:03:53 crc kubenswrapper[4774]: E1121 14:03:53.093135 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.095470 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.109623 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.155531 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.155722 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.155741 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 
14:03:53.155751 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.155765 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.155776 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:53Z","lastTransitionTime":"2025-11-21T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.257738 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.257773 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.257785 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.257800 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.257811 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:53Z","lastTransitionTime":"2025-11-21T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.263877 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.270066 4774 scope.go:117] "RemoveContainer" containerID="e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03" Nov 21 14:03:53 crc kubenswrapper[4774]: E1121 14:03:53.271076 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.272878 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5" exitCode=0 Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.273484 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5"} Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.273595 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"0e668ad61bd274297738a655d5cce5b29ad65721908cfc53df45db709452ddf1"} Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.276166 4774 generic.go:334] "Generic (PLEG): container finished" podID="48241a35-9491-44a3-aeef-5bd2424490a8" containerID="f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2" exitCode=0 Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.276777 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" event={"ID":"48241a35-9491-44a3-aeef-5bd2424490a8","Type":"ContainerDied","Data":"f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2"} Nov 21 14:03:53 crc kubenswrapper[4774]: E1121 14:03:53.285892 4774 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.298385 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.330500 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.343132 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.359888 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.361107 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.361148 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.361160 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.361176 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.361187 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:53Z","lastTransitionTime":"2025-11-21T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.373202 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.421622 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.457669 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.467442 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.467533 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.467558 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.467579 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.467594 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:53Z","lastTransitionTime":"2025-11-21T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.489919 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.532868 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.576289 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.576358 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.576371 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.576389 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.576683 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:53Z","lastTransitionTime":"2025-11-21T14:03:53Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.576746 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.612327 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.661055 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.678591 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.678633 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.678646 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.678662 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.678672 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:53Z","lastTransitionTime":"2025-11-21T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.696388 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\"
,\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.731143 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.774048 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.781662 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.781704 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.781715 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.781735 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.781749 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:53Z","lastTransitionTime":"2025-11-21T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.812745 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.852946 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.884513 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.884556 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.884567 4774 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.884585 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.884595 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:53Z","lastTransitionTime":"2025-11-21T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.890742 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.932529 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269
b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cn
i-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.971275 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:53Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.987485 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.987534 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.987547 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.987567 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:53 crc kubenswrapper[4774]: I1121 14:03:53.987582 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:53Z","lastTransitionTime":"2025-11-21T14:03:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.010344 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.050512 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.090425 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.090466 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.090479 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.090497 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.090510 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:54Z","lastTransitionTime":"2025-11-21T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.093483 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.131556 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.171405 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.193321 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.193388 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.193404 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.193434 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.193456 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:54Z","lastTransitionTime":"2025-11-21T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.215712 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b19
0c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.282098 4774 generic.go:334] "Generic (PLEG): container finished" podID="48241a35-9491-44a3-aeef-5bd2424490a8" containerID="f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb" exitCode=0 Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.282193 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" event={"ID":"48241a35-9491-44a3-aeef-5bd2424490a8","Type":"ContainerDied","Data":"f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.289896 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.289958 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.289970 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.289984 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.289996 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" 
event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.296292 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.296359 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.296375 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.296400 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.296414 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:54Z","lastTransitionTime":"2025-11-21T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.297659 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.310870 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.334086 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f
6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.372182 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.402451 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.402491 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.402499 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.402513 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.402521 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:54Z","lastTransitionTime":"2025-11-21T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.411729 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.456621 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.492212 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.504339 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.504378 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.504387 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.504402 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 
21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.504412 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:54Z","lastTransitionTime":"2025-11-21T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.530620 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.570508 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.607022 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.607072 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.607083 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.607098 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.607108 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:54Z","lastTransitionTime":"2025-11-21T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.613593 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.651777 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.695345 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.709753 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.709827 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.709838 4774 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.709856 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.709865 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:54Z","lastTransitionTime":"2025-11-21T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.717559 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.717653 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:03:58.717635635 +0000 UTC m=+29.369834894 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.717883 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.717975 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.718063 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.718134 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod 
\"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.718128 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.718377 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.718437 4774 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.718528 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:58.718519689 +0000 UTC m=+29.370718948 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.718158 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.718640 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.718649 4774 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.718675 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:58.718667093 +0000 UTC m=+29.370866352 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.718184 4774 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.718710 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:58.718704394 +0000 UTC m=+29.370903653 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.718205 4774 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:03:54 crc kubenswrapper[4774]: E1121 14:03:54.718845 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:03:58.718805006 +0000 UTC m=+29.371004265 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.729352 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:54Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.812423 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.812949 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.813040 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:54 crc 
kubenswrapper[4774]: I1121 14:03:54.813130 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.813192 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:54Z","lastTransitionTime":"2025-11-21T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.916183 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.916213 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.916221 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.916235 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:54 crc kubenswrapper[4774]: I1121 14:03:54.916244 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:54Z","lastTransitionTime":"2025-11-21T14:03:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.018535 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.018585 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.018597 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.018614 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.018625 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:55Z","lastTransitionTime":"2025-11-21T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.092624 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.092665 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.092665 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:03:55 crc kubenswrapper[4774]: E1121 14:03:55.092861 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:03:55 crc kubenswrapper[4774]: E1121 14:03:55.092989 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:03:55 crc kubenswrapper[4774]: E1121 14:03:55.093066 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.121504 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.121564 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.121577 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.121597 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.121611 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:55Z","lastTransitionTime":"2025-11-21T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.224142 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.224180 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.224193 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.224210 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.224221 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:55Z","lastTransitionTime":"2025-11-21T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.296568 4774 generic.go:334] "Generic (PLEG): container finished" podID="48241a35-9491-44a3-aeef-5bd2424490a8" containerID="5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661" exitCode=0 Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.296650 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" event={"ID":"48241a35-9491-44a3-aeef-5bd2424490a8","Type":"ContainerDied","Data":"5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661"} Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.301614 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d"} Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.303330 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd"} Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.316911 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.326985 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.327018 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.327027 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.327040 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.327051 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:55Z","lastTransitionTime":"2025-11-21T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.336508 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.351230 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.367760 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.382763 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.400457 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z 
is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.413851 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.431365 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.433069 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.433107 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.433119 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.433139 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.433152 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:55Z","lastTransitionTime":"2025-11-21T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.447033 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.459474 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.481528 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.497049 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.512840 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.528201 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.536108 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.536169 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.536182 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.536208 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.536224 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:55Z","lastTransitionTime":"2025-11-21T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.547964 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.566396 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.583279 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.601788 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.623013 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z 
is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.639018 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.639073 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.639088 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.639105 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.639116 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:55Z","lastTransitionTime":"2025-11-21T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.640925 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.654659 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.668994 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.682070 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.695752 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.729908 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.742149 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.742196 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.742206 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.742226 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.742235 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:55Z","lastTransitionTime":"2025-11-21T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.775809 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/
run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\"
,\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:55Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.845471 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.845514 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.845523 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.845536 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.845545 4774 setters.go:603] "Node 
became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:55Z","lastTransitionTime":"2025-11-21T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.948121 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.948179 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.948197 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.948220 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:55 crc kubenswrapper[4774]: I1121 14:03:55.948237 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:55Z","lastTransitionTime":"2025-11-21T14:03:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.051676 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.051718 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.051727 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.051743 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.051755 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:56Z","lastTransitionTime":"2025-11-21T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.155461 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.155502 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.155512 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.155528 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.155536 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:56Z","lastTransitionTime":"2025-11-21T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.258590 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.258644 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.258664 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.258684 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.258694 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:56Z","lastTransitionTime":"2025-11-21T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.310708 4774 generic.go:334] "Generic (PLEG): container finished" podID="48241a35-9491-44a3-aeef-5bd2424490a8" containerID="18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d" exitCode=0 Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.310757 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" event={"ID":"48241a35-9491-44a3-aeef-5bd2424490a8","Type":"ContainerDied","Data":"18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d"} Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.335332 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z 
is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.351988 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.362352 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.362422 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.362433 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.362450 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.362463 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:56Z","lastTransitionTime":"2025-11-21T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.364616 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.381480 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.399912 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.416578 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.430798 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.453200 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.465844 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.470920 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.470947 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.470957 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.470972 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.470982 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:56Z","lastTransitionTime":"2025-11-21T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.479343 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.496472 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.512641 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.522435 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:56Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.573800 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.573864 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.573876 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.573896 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.573907 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:56Z","lastTransitionTime":"2025-11-21T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.700144 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.700180 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.700189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.700203 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.700212 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:56Z","lastTransitionTime":"2025-11-21T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.700212 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:56Z","lastTransitionTime":"2025-11-21T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.803554 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.803602 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.803612 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.803632 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.803643 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:56Z","lastTransitionTime":"2025-11-21T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.906010 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.906078 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.906088 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.906106 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.906116 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:56Z","lastTransitionTime":"2025-11-21T14:03:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.009404 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.009470 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.009483 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.009506 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
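Each "Node became not ready" entry carries the full v1.NodeCondition as JSON glued to the end of the line, which makes the reason and message machine-readable. A minimal sketch for pulling that condition out of one such entry, assuming only the Python 3 standard library (the sample line is abbreviated from the entries above):

    import json
    import re

    line = ('Nov 21 14:03:56 crc kubenswrapper[4774]: I1121 14:03:56.906116 4774 '
            'setters.go:603] "Node became not ready" node="crc" condition='
            '{"type":"Ready","status":"False","reason":"KubeletNotReady",'
            '"message":"container runtime network not ready: NetworkReady=false"}')

    # The condition object runs to the end of the entry, so a greedy match
    # from "condition=" to the final brace captures the whole JSON document.
    m = re.search(r'condition=(\{.*\})', line)
    if m:
        cond = json.loads(m.group(1))
        print(cond["type"], cond["status"], cond["reason"])
        # -> Ready False KubeletNotReady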
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.009521 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:57Z","lastTransitionTime":"2025-11-21T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.092962 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.093070 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:03:57 crc kubenswrapper[4774]: E1121 14:03:57.093099 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.093190 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:03:57 crc kubenswrapper[4774]: E1121 14:03:57.093338 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:03:57 crc kubenswrapper[4774]: E1121 14:03:57.093469 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.112897 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.112946 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.112964 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.112978 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.113001 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:57Z","lastTransitionTime":"2025-11-21T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.216003 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.216055 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.216063 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.216079 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.216088 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:57Z","lastTransitionTime":"2025-11-21T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.272662 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-z77mh"] Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.278364 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-z77mh" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.282660 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.284592 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.284882 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.285312 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.304911 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.318271 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.318313 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.318331 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.318354 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.318371 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:57Z","lastTransitionTime":"2025-11-21T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.318749 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.321363 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59"} Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.326190 4774 generic.go:334] "Generic (PLEG): container finished" podID="48241a35-9491-44a3-aeef-5bd2424490a8" containerID="61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79" exitCode=0 Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.326244 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" 
event={"ID":"48241a35-9491-44a3-aeef-5bd2424490a8","Type":"ContainerDied","Data":"61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79"} Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.339841 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.342237 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2548a4d5-f3ff-42d9-9183-76ca04816cab-host\") pod \"node-ca-z77mh\" (UID: \"2548a4d5-f3ff-42d9-9183-76ca04816cab\") " pod="openshift-image-registry/node-ca-z77mh" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.342298 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2548a4d5-f3ff-42d9-9183-76ca04816cab-serviceca\") pod \"node-ca-z77mh\" (UID: \"2548a4d5-f3ff-42d9-9183-76ca04816cab\") " pod="openshift-image-registry/node-ca-z77mh" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.342339 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkmfl\" (UniqueName: \"kubernetes.io/projected/2548a4d5-f3ff-42d9-9183-76ca04816cab-kube-api-access-vkmfl\") pod \"node-ca-z77mh\" (UID: \"2548a4d5-f3ff-42d9-9183-76ca04816cab\") " pod="openshift-image-registry/node-ca-z77mh" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.350170 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.364128 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.379540 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 
2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.390367 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.403675 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.415184 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.421078 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.421112 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.421123 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.421137 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.421147 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:57Z","lastTransitionTime":"2025-11-21T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.427678 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.441851 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.443463 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkmfl\" (UniqueName: \"kubernetes.io/projected/2548a4d5-f3ff-42d9-9183-76ca04816cab-kube-api-access-vkmfl\") pod \"node-ca-z77mh\" (UID: \"2548a4d5-f3ff-42d9-9183-76ca04816cab\") " pod="openshift-image-registry/node-ca-z77mh" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.443617 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2548a4d5-f3ff-42d9-9183-76ca04816cab-host\") pod \"node-ca-z77mh\" (UID: \"2548a4d5-f3ff-42d9-9183-76ca04816cab\") " pod="openshift-image-registry/node-ca-z77mh" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.443732 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2548a4d5-f3ff-42d9-9183-76ca04816cab-host\") pod \"node-ca-z77mh\" (UID: \"2548a4d5-f3ff-42d9-9183-76ca04816cab\") " pod="openshift-image-registry/node-ca-z77mh" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.444162 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2548a4d5-f3ff-42d9-9183-76ca04816cab-serviceca\") pod \"node-ca-z77mh\" (UID: 
\"2548a4d5-f3ff-42d9-9183-76ca04816cab\") " pod="openshift-image-registry/node-ca-z77mh" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.446173 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2548a4d5-f3ff-42d9-9183-76ca04816cab-serviceca\") pod \"node-ca-z77mh\" (UID: \"2548a4d5-f3ff-42d9-9183-76ca04816cab\") " pod="openshift-image-registry/node-ca-z77mh" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.455226 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.462464 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkmfl\" (UniqueName: \"kubernetes.io/projected/2548a4d5-f3ff-42d9-9183-76ca04816cab-kube-api-access-vkmfl\") pod \"node-ca-z77mh\" (UID: \"2548a4d5-f3ff-42d9-9183-76ca04816cab\") " pod="openshift-image-registry/node-ca-z77mh" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.468603 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.491311 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCo
unt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acc
ess-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b156063
29c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.505113 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\
",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.520095 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.524137 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.524170 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.524180 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.524199 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.524210 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:57Z","lastTransitionTime":"2025-11-21T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.534734 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.552023 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.563358 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.579108 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.591717 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.593462 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-z77mh" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.607299 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.619042 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.626196 4774 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.626246 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.626260 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.626283 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.626297 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:57Z","lastTransitionTime":"2025-11-21T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.633494 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.648916 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.666458 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.677159 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.694204 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c
1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:57Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.734674 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.734757 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.734780 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.734804 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.734855 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:57Z","lastTransitionTime":"2025-11-21T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.837377 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.837639 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.837719 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.837859 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.837945 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:57Z","lastTransitionTime":"2025-11-21T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.940573 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.940631 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.940642 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.940657 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:57 crc kubenswrapper[4774]: I1121 14:03:57.940668 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:57Z","lastTransitionTime":"2025-11-21T14:03:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.044208 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.044270 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.044289 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.044311 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.044323 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:58Z","lastTransitionTime":"2025-11-21T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.147066 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.147101 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.147112 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.147127 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.147138 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:58Z","lastTransitionTime":"2025-11-21T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.249977 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.250023 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.250035 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.250060 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.250074 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:58Z","lastTransitionTime":"2025-11-21T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.336859 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" event={"ID":"48241a35-9491-44a3-aeef-5bd2424490a8","Type":"ContainerStarted","Data":"003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.339555 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-z77mh" event={"ID":"2548a4d5-f3ff-42d9-9183-76ca04816cab","Type":"ContainerStarted","Data":"3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.339593 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-z77mh" event={"ID":"2548a4d5-f3ff-42d9-9183-76ca04816cab","Type":"ContainerStarted","Data":"bf3f167f523d78c31a860a13cbbf51776482c796e16d2234dbc9cb3e99740d68"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.352872 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.352931 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.352950 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.352971 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.352986 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:58Z","lastTransitionTime":"2025-11-21T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.366434 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b19
0c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.380880 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.391777 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.409200 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.424536 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.439213 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.449153 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.454670 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.454702 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.454711 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.454725 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.454733 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:58Z","lastTransitionTime":"2025-11-21T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.465002 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0
c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.478665 4774 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.491186 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.502632 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.515370 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.528227 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.542533 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.557235 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.557310 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.557664 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.557678 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.557695 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.557704 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:58Z","lastTransitionTime":"2025-11-21T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.571979 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.582669 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.593887 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.608143 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.630421 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z 
is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.643091 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.655541 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.660155 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.660239 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.660254 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.660272 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.660285 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:58Z","lastTransitionTime":"2025-11-21T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.673594 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.689325 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.706960 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.719224 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.732055 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.745459 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:58Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.755281 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.755403 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:04:06.755377078 +0000 UTC m=+37.407576357 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.755551 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.755630 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.755711 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.755666 4774 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.755778 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.755723 4774 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.755907 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:06.755879921 +0000 UTC m=+37.408079170 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.755918 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.755947 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:06.755935493 +0000 UTC m=+37.408134772 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.755953 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.755973 4774 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.756041 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:06.756021415 +0000 UTC m=+37.408220704 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.756227 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.756297 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.756359 4774 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.756432 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:06.756420606 +0000 UTC m=+37.408619865 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.762202 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.762241 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.762254 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.762274 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.762287 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:58Z","lastTransitionTime":"2025-11-21T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.788719 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.789904 4774 scope.go:117] "RemoveContainer" containerID="e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03" Nov 21 14:03:58 crc kubenswrapper[4774]: E1121 14:03:58.790128 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.865126 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.865163 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.865172 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.865185 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.865196 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:58Z","lastTransitionTime":"2025-11-21T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.967761 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.967791 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.967801 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.967843 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:58 crc kubenswrapper[4774]: I1121 14:03:58.967856 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:58Z","lastTransitionTime":"2025-11-21T14:03:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.070974 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.071286 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.071376 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.071469 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.071554 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:59Z","lastTransitionTime":"2025-11-21T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.092776 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.092947 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.092776 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:03:59 crc kubenswrapper[4774]: E1121 14:03:59.093043 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:03:59 crc kubenswrapper[4774]: E1121 14:03:59.093172 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:03:59 crc kubenswrapper[4774]: E1121 14:03:59.093387 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.174225 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.174287 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.174303 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.174326 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.174341 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:59Z","lastTransitionTime":"2025-11-21T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.277922 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.277972 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.277983 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.278005 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.278017 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:59Z","lastTransitionTime":"2025-11-21T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.347172 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"c09b29d09e245d2acca45d9a391dd5368b0406b25536487ac94dadb2e1fa3621"} Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.348079 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.348132 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.373258 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09b29d09e245d2acca45d9a391dd5368b0406b2
5536487ac94dadb2e1fa3621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.380384 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.380601 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.380668 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.380683 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.380708 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.380722 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:59Z","lastTransitionTime":"2025-11-21T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.380781 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.388465 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.407062 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.426085 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.440073 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.452959 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.472180 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.483429 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.483489 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:59 crc 
kubenswrapper[4774]: I1121 14:03:59.483504 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.483534 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.483554 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:59Z","lastTransitionTime":"2025-11-21T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.486811 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.510670 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.523468 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.536981 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.552432 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.568384 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.583582 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.585678 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.585716 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.585725 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.585744 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.585753 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:59Z","lastTransitionTime":"2025-11-21T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.596583 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.608549 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.621678 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.637304 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.651056 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha
256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.671726 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09b29d09e245d2acca45d9a391dd5368b0406b2
5536487ac94dadb2e1fa3621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.683840 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.688067 4774 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.688105 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.688118 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.688140 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.688153 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:59Z","lastTransitionTime":"2025-11-21T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.696664 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.713443 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.728237 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.765329 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.781722 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.790194 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.790256 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.790270 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.790296 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.790312 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:59Z","lastTransitionTime":"2025-11-21T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.809639 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0
c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.825460 4774 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:03:59Z is after 2025-08-24T17:21:41Z" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.893234 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.893311 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.893329 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.893355 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.893376 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:59Z","lastTransitionTime":"2025-11-21T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.995684 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.995732 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.995746 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.995763 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:03:59 crc kubenswrapper[4774]: I1121 14:03:59.995774 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:03:59Z","lastTransitionTime":"2025-11-21T14:03:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.098458 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.098500 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.098515 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.098536 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.098551 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:00Z","lastTransitionTime":"2025-11-21T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.158003 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09b29d09e245d2acca45d9a391dd5368b0406b25536487ac94dadb2e1fa3621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.197786 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.201561 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.201647 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.201670 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.201696 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.201719 4774 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:00Z","lastTransitionTime":"2025-11-21T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.224657 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.241325 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.256029 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.268344 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.285156 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.303359 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.305019 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.305065 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.305077 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.305095 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.305105 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:00Z","lastTransitionTime":"2025-11-21T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.318636 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.330257 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.330257 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z"
Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.346700 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z"
Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.349476 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.365513 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secr
ets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.379760 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c
3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.407440 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.407485 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.407497 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.407513 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.407532 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:00Z","lastTransitionTime":"2025-11-21T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.510389 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.510425 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.510435 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.510449 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.510458 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:00Z","lastTransitionTime":"2025-11-21T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.613372 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.613431 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.613449 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.613474 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.613491 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:00Z","lastTransitionTime":"2025-11-21T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.716009 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.716046 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.716056 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.716072 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.716085 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:00Z","lastTransitionTime":"2025-11-21T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.818866 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.818926 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.818937 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.818955 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.818966 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:00Z","lastTransitionTime":"2025-11-21T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.921840 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.921891 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.921910 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.921928 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:00 crc kubenswrapper[4774]: I1121 14:04:00.921942 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:00Z","lastTransitionTime":"2025-11-21T14:04:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.025074 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.025113 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.025122 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.025143 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.025154 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.026932 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.026987 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.026998 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.027017 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.027027 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: E1121 14:04:01.040965 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:01Z is after 
2025-08-24T17:21:41Z" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.054367 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.054431 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.054446 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.054470 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.054486 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: E1121 14:04:01.068474 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:01Z is after 
2025-08-24T17:21:41Z" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.072171 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.072240 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.072251 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.072267 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.072278 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: E1121 14:04:01.084606 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:01Z is after 
2025-08-24T17:21:41Z" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.092321 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.092344 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.092349 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:01 crc kubenswrapper[4774]: E1121 14:04:01.092870 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:01 crc kubenswrapper[4774]: E1121 14:04:01.092984 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:01 crc kubenswrapper[4774]: E1121 14:04:01.092723 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.117056 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.117093 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.117104 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.117120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.117130 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: E1121 14:04:01.156892 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:01Z is after 
2025-08-24T17:21:41Z" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.160990 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.161043 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.161055 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.161071 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.161083 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: E1121 14:04:01.176013 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:01Z is after 
2025-08-24T17:21:41Z" Nov 21 14:04:01 crc kubenswrapper[4774]: E1121 14:04:01.176163 4774 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.178109 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.178140 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.178151 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.178204 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.178221 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.281530 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.281571 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.281581 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.281597 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.281614 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.352308 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.384581 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.384663 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.384681 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.384712 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.384731 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.487511 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.487553 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.487565 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.487586 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.487598 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.590162 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.590259 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.590270 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.590313 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.590328 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.693691 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.693756 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.693782 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.693814 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.693883 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.797153 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.797246 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.797268 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.797296 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.797316 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.899518 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.899552 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.899564 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.899580 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:01 crc kubenswrapper[4774]: I1121 14:04:01.899591 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:01Z","lastTransitionTime":"2025-11-21T14:04:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.002445 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.002504 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.002518 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.002537 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.002549 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:02Z","lastTransitionTime":"2025-11-21T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.105168 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.105217 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.105230 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.105246 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.105258 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:02Z","lastTransitionTime":"2025-11-21T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.207641 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.207685 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.207697 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.207717 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.207731 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:02Z","lastTransitionTime":"2025-11-21T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.309997 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.310083 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.310095 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.310114 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.310126 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:02Z","lastTransitionTime":"2025-11-21T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.412359 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.412400 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.412410 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.412425 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.412435 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:02Z","lastTransitionTime":"2025-11-21T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.514449 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.514481 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.514491 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.514504 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.514516 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:02Z","lastTransitionTime":"2025-11-21T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.616842 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.616883 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.616892 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.616908 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.616919 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:02Z","lastTransitionTime":"2025-11-21T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.719044 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.719077 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.719091 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.719107 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.719118 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:02Z","lastTransitionTime":"2025-11-21T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.742492 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.822160 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.822213 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.822226 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.822246 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.822262 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:02Z","lastTransitionTime":"2025-11-21T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.925273 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.925375 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.925409 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.925445 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:02 crc kubenswrapper[4774]: I1121 14:04:02.925474 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:02Z","lastTransitionTime":"2025-11-21T14:04:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.028942 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.029343 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.029456 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.029573 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.029708 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:03Z","lastTransitionTime":"2025-11-21T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.092796 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.092796 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:03 crc kubenswrapper[4774]: E1121 14:04:03.093279 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:03 crc kubenswrapper[4774]: E1121 14:04:03.093470 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.092887 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:03 crc kubenswrapper[4774]: E1121 14:04:03.093650 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.132190 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.132779 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.132793 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.132808 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.132837 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:03Z","lastTransitionTime":"2025-11-21T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.234632 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.234663 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.234672 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.234688 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.234698 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:03Z","lastTransitionTime":"2025-11-21T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.337282 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.337334 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.337391 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.337414 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.337427 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:03Z","lastTransitionTime":"2025-11-21T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.439717 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.439778 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.439790 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.439811 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.439836 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:03Z","lastTransitionTime":"2025-11-21T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.542840 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.542899 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.542916 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.542951 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.542967 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:03Z","lastTransitionTime":"2025-11-21T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.645482 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.645519 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.645528 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.645541 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.645550 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:03Z","lastTransitionTime":"2025-11-21T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.698065 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp"] Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.698563 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.700791 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.700966 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.721698 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09b29d09e245d2acca45d9a391dd5368b0406b2
5536487ac94dadb2e1fa3621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.736243 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.747713 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.747745 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.747757 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.747777 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.747790 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:03Z","lastTransitionTime":"2025-11-21T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.749719 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.761555 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.772669 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.785945 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.799119 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.811403 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/12e7b89b-cb7e-4e3e-b9ef-bf63ed997463-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jkjvp\" (UID: \"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.811436 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/12e7b89b-cb7e-4e3e-b9ef-bf63ed997463-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jkjvp\" (UID: \"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.811514 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/12e7b89b-cb7e-4e3e-b9ef-bf63ed997463-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jkjvp\" (UID: \"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 
14:04:03.811533 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7m2zp\" (UniqueName: \"kubernetes.io/projected/12e7b89b-cb7e-4e3e-b9ef-bf63ed997463-kube-api-access-7m2zp\") pod \"ovnkube-control-plane-749d76644c-jkjvp\" (UID: \"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.812370 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.821186 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.835682 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.847273 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.849907 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.849939 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.849952 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.849969 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.849980 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:03Z","lastTransitionTime":"2025-11-21T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.862277 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.880598 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.894527 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.907701 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.912029 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/12e7b89b-cb7e-4e3e-b9ef-bf63ed997463-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jkjvp\" (UID: \"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.912084 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7m2zp\" (UniqueName: \"kubernetes.io/projected/12e7b89b-cb7e-4e3e-b9ef-bf63ed997463-kube-api-access-7m2zp\") pod \"ovnkube-control-plane-749d76644c-jkjvp\" (UID: \"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.912118 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/12e7b89b-cb7e-4e3e-b9ef-bf63ed997463-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jkjvp\" (UID: \"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.912146 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/12e7b89b-cb7e-4e3e-b9ef-bf63ed997463-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jkjvp\" (UID: \"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.913046 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/12e7b89b-cb7e-4e3e-b9ef-bf63ed997463-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jkjvp\" (UID: \"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.913140 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/12e7b89b-cb7e-4e3e-b9ef-bf63ed997463-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jkjvp\" (UID: \"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.920320 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/12e7b89b-cb7e-4e3e-b9ef-bf63ed997463-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jkjvp\" (UID: \"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.927711 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7m2zp\" (UniqueName: \"kubernetes.io/projected/12e7b89b-cb7e-4e3e-b9ef-bf63ed997463-kube-api-access-7m2zp\") pod \"ovnkube-control-plane-749d76644c-jkjvp\" (UID: 
\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.952022 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.952059 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.952073 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.952090 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:03 crc kubenswrapper[4774]: I1121 14:04:03.952100 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:03Z","lastTransitionTime":"2025-11-21T14:04:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.013286 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.056681 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.056858 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.056885 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.056914 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.056943 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:04Z","lastTransitionTime":"2025-11-21T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.159535 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.159616 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.159630 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.159647 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.159659 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:04Z","lastTransitionTime":"2025-11-21T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.262836 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.262884 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.262896 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.262913 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.262925 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:04Z","lastTransitionTime":"2025-11-21T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.364429 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" event={"ID":"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463","Type":"ContainerStarted","Data":"c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.364491 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" event={"ID":"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463","Type":"ContainerStarted","Data":"65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.364506 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" event={"ID":"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463","Type":"ContainerStarted","Data":"75f6c8b390115a5c6f30ac02682a5060a6d1af479c6349315013454aa714e341"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.365087 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.365146 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.365161 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.365186 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.365200 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:04Z","lastTransitionTime":"2025-11-21T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.367537 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/0.log" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.372612 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="c09b29d09e245d2acca45d9a391dd5368b0406b25536487ac94dadb2e1fa3621" exitCode=1 Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.372650 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"c09b29d09e245d2acca45d9a391dd5368b0406b25536487ac94dadb2e1fa3621"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.373503 4774 scope.go:117] "RemoveContainer" containerID="c09b29d09e245d2acca45d9a391dd5368b0406b25536487ac94dadb2e1fa3621" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.377620 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.393859 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"c
ontainerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: 
I1121 14:04:04.408771 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.427083 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.446350 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.462446 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.468060 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.468120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.468138 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.468465 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.468506 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:04Z","lastTransitionTime":"2025-11-21T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.479430 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.495966 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.552241 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.571746 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09b29d09e245d2acca45d9a391dd5368b0406b2
5536487ac94dadb2e1fa3621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.572162 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.572181 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.572189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.572205 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.572217 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:04Z","lastTransitionTime":"2025-11-21T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.583463 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.595436 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.607921 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.619908 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.631761 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.645015 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723
a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.658890 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.671322 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.674875 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.674919 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.674933 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.674955 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.674969 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:04Z","lastTransitionTime":"2025-11-21T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.683163 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.695483 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.713593 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef247
9b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09b29d09e245d2acca45d9a391dd5368b0406b25536487ac94dadb2e1fa3621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c09b29d09e245d2acca45d9a391dd5368b0406b25536487ac94dadb2e1fa3621\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"message\\\":\\\".go:160\\\\nI1121 14:04:03.774958 6082 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1121 14:04:03.774964 6082 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1121 14:04:03.774987 6082 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1121 14:04:03.775371 6082 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1121 14:04:03.775384 6082 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1121 14:04:03.775557 6082 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1121 14:04:03.775606 6082 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1121 14:04:03.775614 6082 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1121 14:04:03.775638 6082 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1121 14:04:03.775644 6082 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1121 14:04:03.775649 6082 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1121 14:04:03.775662 6082 factory.go:656] Stopping watch factory\\\\nI1121 14:04:03.775675 6082 handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 14:04:03.775680 6082 ovnkube.go:599] Stopped ovnkube\\\\nI1121 14:04:03.775687 6082 handler.go:208] Removed *v1.Node event handler 2\\\\nI1121 14:04:03.775705 6082 handler.go:208] Removed *v1.Namespace event handler 
1\\\\nI1121 14\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.730933 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.742591 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.752080 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.760436 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.770649 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.781331 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.781376 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.781387 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.781407 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.781422 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:04Z","lastTransitionTime":"2025-11-21T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.787037 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 
14:04:04.800208 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.812279 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.828184 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:04Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.884447 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.884514 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:04 crc 
kubenswrapper[4774]: I1121 14:04:04.884527 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.884547 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.884559 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:04Z","lastTransitionTime":"2025-11-21T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.986780 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.986828 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.986837 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.986855 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:04 crc kubenswrapper[4774]: I1121 14:04:04.986865 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:04Z","lastTransitionTime":"2025-11-21T14:04:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.089786 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.089846 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.089855 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.089871 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.089881 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:05Z","lastTransitionTime":"2025-11-21T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.092927 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.093034 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:05 crc kubenswrapper[4774]: E1121 14:04:05.093155 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.093193 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:05 crc kubenswrapper[4774]: E1121 14:04:05.093339 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:05 crc kubenswrapper[4774]: E1121 14:04:05.093467 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.185380 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-44mbn"] Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.185864 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:05 crc kubenswrapper[4774]: E1121 14:04:05.185921 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.192551 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.192587 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.192595 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.192609 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.192618 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:05Z","lastTransitionTime":"2025-11-21T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.199556 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14
:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.215118 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"cont
ainerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.226618 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.238096 4774 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.249928 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.252368 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkbdr\" (UniqueName: \"kubernetes.io/projected/0d294e10-6a0e-4871-871c-01fb8e7ead03-kube-api-access-fkbdr\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 
14:04:05.252421 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.261136 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.272030 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.285770 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.294654 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.294688 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.294697 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.294711 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.294720 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:05Z","lastTransitionTime":"2025-11-21T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.299586 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.312530 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.331086 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c09b29d09e245d2acca45d9a391dd5368b0406b2
5536487ac94dadb2e1fa3621\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c09b29d09e245d2acca45d9a391dd5368b0406b25536487ac94dadb2e1fa3621\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"message\\\":\\\".go:160\\\\nI1121 14:04:03.774958 6082 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1121 14:04:03.774964 6082 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1121 14:04:03.774987 6082 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1121 14:04:03.775371 6082 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1121 14:04:03.775384 6082 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1121 14:04:03.775557 6082 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1121 14:04:03.775606 6082 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1121 14:04:03.775614 6082 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1121 14:04:03.775638 6082 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1121 14:04:03.775644 6082 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1121 14:04:03.775649 6082 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1121 14:04:03.775662 6082 factory.go:656] Stopping watch factory\\\\nI1121 14:04:03.775675 6082 handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 14:04:03.775680 6082 ovnkube.go:599] Stopped ovnkube\\\\nI1121 14:04:03.775687 6082 handler.go:208] Removed *v1.Node event handler 2\\\\nI1121 14:04:03.775705 6082 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1121 
14\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.341568 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.350354 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/v
ar/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.352834 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkbdr\" (UniqueName: \"kubernetes.io/projected/0d294e10-6a0e-4871-871c-01fb8e7ead03-kube-api-access-fkbdr\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.352875 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:05 crc kubenswrapper[4774]: E1121 14:04:05.352984 4774 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:05 crc kubenswrapper[4774]: E1121 14:04:05.353039 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs podName:0d294e10-6a0e-4871-871c-01fb8e7ead03 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:05.853023569 +0000 UTC m=+36.505222828 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs") pod "network-metrics-daemon-44mbn" (UID: "0d294e10-6a0e-4871-871c-01fb8e7ead03") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.363578 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"
,\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.370579 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkbdr\" (UniqueName: \"kubernetes.io/projected/0d294e10-6a0e-4871-871c-01fb8e7ead03-kube-api-access-fkbdr\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.378453 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/0.log" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 
14:04:05.378840 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.380607 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9"} Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.381476 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.391285 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.396529 4774 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.396550 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.396559 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.396573 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.396583 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:05Z","lastTransitionTime":"2025-11-21T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.409771 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbc
f5d44e1939ff1a336b1b00a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c09b29d09e245d2acca45d9a391dd5368b0406b25536487ac94dadb2e1fa3621\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"message\\\":\\\".go:160\\\\nI1121 14:04:03.774958 6082 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1121 14:04:03.774964 6082 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1121 14:04:03.774987 6082 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1121 14:04:03.775371 6082 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1121 14:04:03.775384 6082 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1121 14:04:03.775557 6082 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1121 14:04:03.775606 6082 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1121 14:04:03.775614 6082 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1121 14:04:03.775638 6082 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1121 14:04:03.775644 6082 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1121 14:04:03.775649 6082 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1121 14:04:03.775662 6082 factory.go:656] Stopping watch factory\\\\nI1121 14:04:03.775675 6082 handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 14:04:03.775680 6082 ovnkube.go:599] Stopped ovnkube\\\\nI1121 14:04:03.775687 6082 handler.go:208] Removed *v1.Node event handler 2\\\\nI1121 14:04:03.775705 6082 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1121 
14\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.425837 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.437399 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.447348 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.458618 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.469754 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.480136 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.492016 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.498998 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.499032 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.499045 4774 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.499062 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.499075 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:05Z","lastTransitionTime":"2025-11-21T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.501649 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.515051 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:68
7fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.524907 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.534536 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.548894 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.559290 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.571435 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.583946 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:05Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.601997 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.602077 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.602089 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.602113 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.602126 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:05Z","lastTransitionTime":"2025-11-21T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.705706 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.706108 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.706127 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.706154 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.706172 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:05Z","lastTransitionTime":"2025-11-21T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.809284 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.809335 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.809348 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.809361 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.809373 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:05Z","lastTransitionTime":"2025-11-21T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.856795 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:05 crc kubenswrapper[4774]: E1121 14:04:05.856957 4774 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:05 crc kubenswrapper[4774]: E1121 14:04:05.857017 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs podName:0d294e10-6a0e-4871-871c-01fb8e7ead03 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:06.857003032 +0000 UTC m=+37.509202291 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs") pod "network-metrics-daemon-44mbn" (UID: "0d294e10-6a0e-4871-871c-01fb8e7ead03") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.912185 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.912247 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.912265 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.912291 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:05 crc kubenswrapper[4774]: I1121 14:04:05.912308 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:05Z","lastTransitionTime":"2025-11-21T14:04:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.015288 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.015359 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.015380 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.015406 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.015426 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:06Z","lastTransitionTime":"2025-11-21T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.118202 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.118275 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.118298 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.118326 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.118345 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:06Z","lastTransitionTime":"2025-11-21T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.221505 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.221560 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.221573 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.221595 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.221610 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:06Z","lastTransitionTime":"2025-11-21T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.324552 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.324611 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.324626 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.324649 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.324664 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:06Z","lastTransitionTime":"2025-11-21T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.388128 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/1.log" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.389044 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/0.log" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.392983 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9" exitCode=1 Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.393024 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9"} Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.393094 4774 scope.go:117] "RemoveContainer" containerID="c09b29d09e245d2acca45d9a391dd5368b0406b25536487ac94dadb2e1fa3621" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.394315 4774 scope.go:117] "RemoveContainer" containerID="fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9" Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.394681 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.411055 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.423455 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.427682 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.427736 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.427783 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.427804 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.427830 4774 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:06Z","lastTransitionTime":"2025-11-21T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.434802 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.451391 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-res
ources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery 
information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.466323 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.480346 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.492652 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.507752 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.517807 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.530731 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.530791 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.530807 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.530857 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.530875 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:06Z","lastTransitionTime":"2025-11-21T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.535274 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.554115 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.571352 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.586610 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.607356 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.622038 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha
256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.633715 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.633764 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.633779 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.633802 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.633834 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:06Z","lastTransitionTime":"2025-11-21T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.645890 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c09b29d09e245d2acca45d9a391dd5368b0406b25536487ac94dadb2e1fa3621\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"message\\\":\\\".go:160\\\\nI1121 14:04:03.774958 6082 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1121 14:04:03.774964 6082 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1121 14:04:03.774987 6082 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1121 14:04:03.775371 6082 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1121 14:04:03.775384 6082 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1121 14:04:03.775557 6082 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1121 14:04:03.775606 6082 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1121 14:04:03.775614 6082 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1121 14:04:03.775638 6082 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1121 14:04:03.775644 6082 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1121 14:04:03.775649 6082 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1121 14:04:03.775662 6082 factory.go:656] Stopping watch factory\\\\nI1121 14:04:03.775675 6082 handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 14:04:03.775680 6082 ovnkube.go:599] Stopped ovnkube\\\\nI1121 14:04:03.775687 6082 handler.go:208] Removed *v1.Node event handler 2\\\\nI1121 14:04:03.775705 6082 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1121 
14\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"n:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400027 6279 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:05.399922 6279 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 14:04:05.399871 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400560 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 14:04:05.400601 6279 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:06Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.737383 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.737457 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.737490 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.737532 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.737556 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:06Z","lastTransitionTime":"2025-11-21T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.766045 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.766207 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.766240 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.766266 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.766392 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:04:22.766330225 +0000 UTC m=+53.418529544 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.766415 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.766445 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.766442 4774 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.766470 4774 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.766510 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.766461 4774 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.766570 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:22.7665415 +0000 UTC m=+53.418740799 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.766649 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.766899 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-11-21 14:04:22.766871919 +0000 UTC m=+53.419071378 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.766927 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:22.76691591 +0000 UTC m=+53.419115409 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.766923 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.766954 4774 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.767019 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:22.766996433 +0000 UTC m=+53.419195692 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.840729 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.840773 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.840783 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.840799 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.840810 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:06Z","lastTransitionTime":"2025-11-21T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.867296 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.867404 4774 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:06 crc kubenswrapper[4774]: E1121 14:04:06.867454 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs podName:0d294e10-6a0e-4871-871c-01fb8e7ead03 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:08.867441368 +0000 UTC m=+39.519640627 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs") pod "network-metrics-daemon-44mbn" (UID: "0d294e10-6a0e-4871-871c-01fb8e7ead03") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.942867 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.942916 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.942928 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.942952 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:06 crc kubenswrapper[4774]: I1121 14:04:06.942965 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:06Z","lastTransitionTime":"2025-11-21T14:04:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.046154 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.046232 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.046252 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.046288 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.046323 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:07Z","lastTransitionTime":"2025-11-21T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.093151 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.093151 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.093278 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.093350 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:07 crc kubenswrapper[4774]: E1121 14:04:07.093565 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:07 crc kubenswrapper[4774]: E1121 14:04:07.093722 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:07 crc kubenswrapper[4774]: E1121 14:04:07.093937 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:07 crc kubenswrapper[4774]: E1121 14:04:07.094055 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.150122 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.150181 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.150198 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.150221 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.150242 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:07Z","lastTransitionTime":"2025-11-21T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.253161 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.253215 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.253233 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.253261 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.253287 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:07Z","lastTransitionTime":"2025-11-21T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.357267 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.357354 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.357367 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.357391 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.357406 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:07Z","lastTransitionTime":"2025-11-21T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.400435 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/1.log" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.405698 4774 scope.go:117] "RemoveContainer" containerID="fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9" Nov 21 14:04:07 crc kubenswrapper[4774]: E1121 14:04:07.405933 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.439781 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbc
f5d44e1939ff1a336b1b00a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"n:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400027 6279 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:05.399922 6279 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 14:04:05.399871 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400560 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 14:04:05.400601 6279 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.461039 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.461111 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.461136 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.461166 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.461188 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:07Z","lastTransitionTime":"2025-11-21T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.466121 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.482618 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.497184 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.513249 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.526012 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.545223 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.562387 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.564070 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.564120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.564133 4774 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.564157 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.564171 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:07Z","lastTransitionTime":"2025-11-21T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.575172 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.589650 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:68
7fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.602575 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.616228 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.629659 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.642436 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.663607 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.666294 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.666334 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.666347 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.666364 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.666377 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:07Z","lastTransitionTime":"2025-11-21T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.696855 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubel
et\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:07Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.770089 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.770165 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.770192 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.770232 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.770245 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:07Z","lastTransitionTime":"2025-11-21T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.874652 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.874752 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.874786 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.874860 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.874926 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:07Z","lastTransitionTime":"2025-11-21T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.978696 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.978750 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.978763 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.978783 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:07 crc kubenswrapper[4774]: I1121 14:04:07.978796 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:07Z","lastTransitionTime":"2025-11-21T14:04:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.081650 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.081711 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.081736 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.081765 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.081786 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:08Z","lastTransitionTime":"2025-11-21T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.184529 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.184564 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.184573 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.184587 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.184596 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:08Z","lastTransitionTime":"2025-11-21T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.286579 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.287887 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.287904 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.287917 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.287926 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:08Z","lastTransitionTime":"2025-11-21T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.391014 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.391080 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.391091 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.391111 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.391123 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:08Z","lastTransitionTime":"2025-11-21T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.493047 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.493083 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.493095 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.493133 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.493144 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:08Z","lastTransitionTime":"2025-11-21T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.596671 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.596731 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.596749 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.596775 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.596797 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:08Z","lastTransitionTime":"2025-11-21T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.699895 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.699937 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.699947 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.699963 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.699973 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:08Z","lastTransitionTime":"2025-11-21T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.802176 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.802230 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.802245 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.802268 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.802281 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:08Z","lastTransitionTime":"2025-11-21T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.887065 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:08 crc kubenswrapper[4774]: E1121 14:04:08.887312 4774 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:08 crc kubenswrapper[4774]: E1121 14:04:08.887398 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs podName:0d294e10-6a0e-4871-871c-01fb8e7ead03 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:12.887376705 +0000 UTC m=+43.539575964 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs") pod "network-metrics-daemon-44mbn" (UID: "0d294e10-6a0e-4871-871c-01fb8e7ead03") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.905165 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.905222 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.905234 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.905254 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:08 crc kubenswrapper[4774]: I1121 14:04:08.905266 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:08Z","lastTransitionTime":"2025-11-21T14:04:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.008657 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.008714 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.008729 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.008747 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.008759 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:09Z","lastTransitionTime":"2025-11-21T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.093024 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:09 crc kubenswrapper[4774]: E1121 14:04:09.093149 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.093218 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.093275 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:09 crc kubenswrapper[4774]: E1121 14:04:09.093372 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.093489 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:09 crc kubenswrapper[4774]: E1121 14:04:09.093728 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:09 crc kubenswrapper[4774]: E1121 14:04:09.094090 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.111919 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.111979 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.111992 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.112012 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.112024 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:09Z","lastTransitionTime":"2025-11-21T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.214562 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.214634 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.214645 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.214671 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.214689 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:09Z","lastTransitionTime":"2025-11-21T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.317441 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.317540 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.317565 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.317591 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.317609 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:09Z","lastTransitionTime":"2025-11-21T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.419758 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.419812 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.419844 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.419862 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.419877 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:09Z","lastTransitionTime":"2025-11-21T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.522042 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.522073 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.522112 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.522138 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.522149 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:09Z","lastTransitionTime":"2025-11-21T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.624992 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.625055 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.625076 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.625097 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.625112 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:09Z","lastTransitionTime":"2025-11-21T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.727664 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.727712 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.727726 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.727749 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.727761 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:09Z","lastTransitionTime":"2025-11-21T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.830142 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.830185 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.830196 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.830214 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.830226 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:09Z","lastTransitionTime":"2025-11-21T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.933146 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.933210 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.933237 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.933261 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:09 crc kubenswrapper[4774]: I1121 14:04:09.933277 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:09Z","lastTransitionTime":"2025-11-21T14:04:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.036697 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.036764 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.036777 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.036794 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.036806 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:10Z","lastTransitionTime":"2025-11-21T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.111902 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"n:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400027 6279 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:05.399922 6279 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 14:04:05.399871 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400560 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 14:04:05.400601 6279 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.128326 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.139296 4774 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.139345 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.139356 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.139372 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.139383 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:10Z","lastTransitionTime":"2025-11-21T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.143197 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.155035 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\
\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.168891 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcf
c177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.185444 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.201302 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.214342 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.235032 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.244283 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.244340 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:10 crc 
kubenswrapper[4774]: I1121 14:04:10.244362 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.244387 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.244406 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:10Z","lastTransitionTime":"2025-11-21T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.250337 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.274972 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.292183 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.306571 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.319891 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.332329 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.347111 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.347170 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.347190 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.347214 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.347233 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:10Z","lastTransitionTime":"2025-11-21T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.347674 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.450406 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.450480 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.450501 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 
14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.450527 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.450544 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:10Z","lastTransitionTime":"2025-11-21T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.576436 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.576506 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.576526 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.576552 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.576576 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:10Z","lastTransitionTime":"2025-11-21T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.680937 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.681026 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.681044 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.681065 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.681086 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:10Z","lastTransitionTime":"2025-11-21T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.783886 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.783966 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.783982 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.784003 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.784016 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:10Z","lastTransitionTime":"2025-11-21T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.887369 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.887428 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.887441 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.887464 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.887478 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:10Z","lastTransitionTime":"2025-11-21T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.991110 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.991175 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.991185 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.991208 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:10 crc kubenswrapper[4774]: I1121 14:04:10.991220 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:10Z","lastTransitionTime":"2025-11-21T14:04:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.092261 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.092356 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:11 crc kubenswrapper[4774]: E1121 14:04:11.092433 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.092453 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:11 crc kubenswrapper[4774]: E1121 14:04:11.092673 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.092777 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:11 crc kubenswrapper[4774]: E1121 14:04:11.092941 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:11 crc kubenswrapper[4774]: E1121 14:04:11.093065 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.094668 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.094735 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.094761 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.094791 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.094878 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.199298 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.199362 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.199377 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.199399 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.199414 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.302529 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.302581 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.302596 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.302619 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.302634 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.327363 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.327441 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.327469 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.327505 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.327527 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: E1121 14:04:11.352046 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:11Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.358027 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.358084 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.358097 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.358121 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.358134 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: E1121 14:04:11.378890 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:11Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.385340 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.385405 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.385426 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.385459 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.385486 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: E1121 14:04:11.410130 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:11Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.415921 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.415993 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.416011 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.416040 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.416059 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: E1121 14:04:11.434162 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:11Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.439055 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.439106 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.439116 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.439136 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.439147 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: E1121 14:04:11.454910 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:11Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:11 crc kubenswrapper[4774]: E1121 14:04:11.455038 4774 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.457306 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
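All three patch attempts above fail on the same TLS error: the serving certificate of the network-node-identity webhook expired on 2025-08-24, long before the node's clock time, so the kubelet exhausts its fixed retry budget and gives up ("update node status exceeds retry count"). A minimal Go probe along the following lines (illustrative, not part of this log; it assumes only the webhook endpoint https://127.0.0.1:9743 taken from the error text) can confirm the certificate's validity window from the node itself:

// certprobe.go - minimal sketch: connect to the webhook endpoint named in the
// log and print the serving certificate's validity window. InsecureSkipVerify
// is deliberate: the point is to read the expired certificate, not to trust it.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		log.Fatal("no peer certificate presented")
	}
	cert := state.PeerCertificates[0]
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
	if time.Now().After(cert.NotAfter) {
		// Matches the x509 "certificate has expired" error in the records above.
		fmt.Println("certificate is expired")
	}
}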
event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.457344 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.457358 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.457384 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.457398 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.560078 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.560116 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.560132 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.560153 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.560170 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.662695 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.662745 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.662755 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.662771 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.662780 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.765791 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.765907 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.765937 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.765969 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.765987 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.869541 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.869590 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.869602 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.869621 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.869634 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.972456 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.972526 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.972543 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.972567 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:11 crc kubenswrapper[4774]: I1121 14:04:11.972582 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:11Z","lastTransitionTime":"2025-11-21T14:04:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.076134 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.076195 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.076204 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.076219 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.076230 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:12Z","lastTransitionTime":"2025-11-21T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.093398 4774 scope.go:117] "RemoveContainer" containerID="e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.179473 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.179977 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.179992 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.180012 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.180023 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:12Z","lastTransitionTime":"2025-11-21T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.282621 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.282659 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.282674 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.282692 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.282705 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:12Z","lastTransitionTime":"2025-11-21T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.384915 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.384958 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.384971 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.384989 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.385005 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:12Z","lastTransitionTime":"2025-11-21T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.426282 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.428107 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29"} Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.428895 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.450038 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c
6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.466594 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.484641 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.489075 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.489103 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.489112 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.489125 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.489134 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:12Z","lastTransitionTime":"2025-11-21T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.501538 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.515740 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.534539 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef247
9b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"n:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400027 6279 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:05.399922 6279 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 14:04:05.399871 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400560 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 14:04:05.400601 6279 ovnkube.go:137] failed to run ovnkube: [failed to start 
network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\
\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.549330 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.561326 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.574581 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.584997 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.591189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.591219 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.591229 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.591246 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.591258 4774 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:12Z","lastTransitionTime":"2025-11-21T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.597356 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.612420 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.624437 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.633624 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.646878 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.659126 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:12Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.694297 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.694345 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.694357 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.694375 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.694388 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:12Z","lastTransitionTime":"2025-11-21T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.799349 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.799397 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.799418 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.799436 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.799448 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:12Z","lastTransitionTime":"2025-11-21T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.902016 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.902051 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.902061 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.902073 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.902081 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:12Z","lastTransitionTime":"2025-11-21T14:04:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:12 crc kubenswrapper[4774]: I1121 14:04:12.929208 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:12 crc kubenswrapper[4774]: E1121 14:04:12.929438 4774 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:12 crc kubenswrapper[4774]: E1121 14:04:12.929544 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs podName:0d294e10-6a0e-4871-871c-01fb8e7ead03 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:20.929521697 +0000 UTC m=+51.581721026 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs") pod "network-metrics-daemon-44mbn" (UID: "0d294e10-6a0e-4871-871c-01fb8e7ead03") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.004557 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.004588 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.004597 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.004610 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.004619 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:13Z","lastTransitionTime":"2025-11-21T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.092704 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.092731 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:13 crc kubenswrapper[4774]: E1121 14:04:13.092837 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:13 crc kubenswrapper[4774]: E1121 14:04:13.092958 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.093122 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.093201 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:13 crc kubenswrapper[4774]: E1121 14:04:13.093222 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:13 crc kubenswrapper[4774]: E1121 14:04:13.093460 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.107141 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.107193 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.107207 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.107226 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.107239 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:13Z","lastTransitionTime":"2025-11-21T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.210209 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.210251 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.210265 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.210284 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.210297 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:13Z","lastTransitionTime":"2025-11-21T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.313333 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.313602 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.313686 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.313775 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.313870 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:13Z","lastTransitionTime":"2025-11-21T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.417000 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.417889 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.418030 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.418197 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.418338 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:13Z","lastTransitionTime":"2025-11-21T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.521236 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.521266 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.521284 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.521299 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.521308 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:13Z","lastTransitionTime":"2025-11-21T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.623748 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.623864 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.623892 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.623963 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.623989 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:13Z","lastTransitionTime":"2025-11-21T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.726841 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.726878 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.726888 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.726905 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.726914 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:13Z","lastTransitionTime":"2025-11-21T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.829419 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.829475 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.829495 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.829517 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.829533 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:13Z","lastTransitionTime":"2025-11-21T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.931644 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.931680 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.931689 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.931703 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:13 crc kubenswrapper[4774]: I1121 14:04:13.931712 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:13Z","lastTransitionTime":"2025-11-21T14:04:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.034360 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.034405 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.034417 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.034438 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.034450 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:14Z","lastTransitionTime":"2025-11-21T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.137026 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.137089 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.137107 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.137130 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.137148 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:14Z","lastTransitionTime":"2025-11-21T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.240142 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.240215 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.240234 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.240265 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.240288 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:14Z","lastTransitionTime":"2025-11-21T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.343445 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.343501 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.343515 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.343532 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.343567 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:14Z","lastTransitionTime":"2025-11-21T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.446171 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.446223 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.446234 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.446253 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.446265 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:14Z","lastTransitionTime":"2025-11-21T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.548876 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.548934 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.548951 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.548972 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.548988 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:14Z","lastTransitionTime":"2025-11-21T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.652403 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.652483 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.652494 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.652513 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.652526 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:14Z","lastTransitionTime":"2025-11-21T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.760145 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.760208 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.760227 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.760251 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.760269 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:14Z","lastTransitionTime":"2025-11-21T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.862937 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.862999 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.863015 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.863034 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.863047 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:14Z","lastTransitionTime":"2025-11-21T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.965541 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.965612 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.965630 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.965659 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:14 crc kubenswrapper[4774]: I1121 14:04:14.965677 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:14Z","lastTransitionTime":"2025-11-21T14:04:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.069508 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.069561 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.069573 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.069589 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.070023 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:15Z","lastTransitionTime":"2025-11-21T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.092350 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.092431 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.092484 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:15 crc kubenswrapper[4774]: E1121 14:04:15.092651 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.093247 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:15 crc kubenswrapper[4774]: E1121 14:04:15.093365 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:15 crc kubenswrapper[4774]: E1121 14:04:15.093464 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:15 crc kubenswrapper[4774]: E1121 14:04:15.093557 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.173176 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.173253 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.173277 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.173307 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.173331 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:15Z","lastTransitionTime":"2025-11-21T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.276515 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.276591 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.276615 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.276648 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.276669 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:15Z","lastTransitionTime":"2025-11-21T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.379388 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.379463 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.379483 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.379589 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.379662 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:15Z","lastTransitionTime":"2025-11-21T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.482593 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.482658 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.482677 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.482699 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.482714 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:15Z","lastTransitionTime":"2025-11-21T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.585052 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.585117 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.585129 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.585148 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.585162 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:15Z","lastTransitionTime":"2025-11-21T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.688301 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.688349 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.688358 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.688374 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.688390 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:15Z","lastTransitionTime":"2025-11-21T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.790729 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.790770 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.790781 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.790795 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.790864 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:15Z","lastTransitionTime":"2025-11-21T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.894288 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.894357 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.894372 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.894396 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.894413 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:15Z","lastTransitionTime":"2025-11-21T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.997635 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.997731 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.997765 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.997875 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:15 crc kubenswrapper[4774]: I1121 14:04:15.997902 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:15Z","lastTransitionTime":"2025-11-21T14:04:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.100242 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.100298 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.100310 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.100327 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.100342 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:16Z","lastTransitionTime":"2025-11-21T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.203055 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.203100 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.203115 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.203131 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.203140 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:16Z","lastTransitionTime":"2025-11-21T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.306475 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.306904 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.306924 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.306945 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.306962 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:16Z","lastTransitionTime":"2025-11-21T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.409554 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.409626 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.409647 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.409678 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.409704 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:16Z","lastTransitionTime":"2025-11-21T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.512846 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.512896 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.512919 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.512941 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.512956 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:16Z","lastTransitionTime":"2025-11-21T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.615988 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.616048 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.616061 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.616083 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.616097 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:16Z","lastTransitionTime":"2025-11-21T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.719035 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.719134 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.719145 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.719166 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.719178 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:16Z","lastTransitionTime":"2025-11-21T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.822260 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.822334 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.822344 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.822360 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.822372 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:16Z","lastTransitionTime":"2025-11-21T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.925147 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.925190 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.925199 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.925213 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:16 crc kubenswrapper[4774]: I1121 14:04:16.925223 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:16Z","lastTransitionTime":"2025-11-21T14:04:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.027570 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.027605 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.027614 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.027626 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.027635 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:17Z","lastTransitionTime":"2025-11-21T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.092790 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.092844 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.092889 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.093029 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:17 crc kubenswrapper[4774]: E1121 14:04:17.093034 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:17 crc kubenswrapper[4774]: E1121 14:04:17.093084 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:17 crc kubenswrapper[4774]: E1121 14:04:17.093216 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:17 crc kubenswrapper[4774]: E1121 14:04:17.093377 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.130572 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.130641 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.130657 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.130672 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.130702 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:17Z","lastTransitionTime":"2025-11-21T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.233601 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.233656 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.233668 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.233688 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.233703 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:17Z","lastTransitionTime":"2025-11-21T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.336418 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.336490 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.336533 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.336566 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.336590 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:17Z","lastTransitionTime":"2025-11-21T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.439210 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.439280 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.439298 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.439319 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.439332 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:17Z","lastTransitionTime":"2025-11-21T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.542696 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.542751 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.542765 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.542785 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.542800 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:17Z","lastTransitionTime":"2025-11-21T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.645886 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.645933 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.645965 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.645984 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.645999 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:17Z","lastTransitionTime":"2025-11-21T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.748891 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.748961 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.748980 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.749008 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.749034 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:17Z","lastTransitionTime":"2025-11-21T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.851518 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.851575 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.851593 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.851618 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.851638 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:17Z","lastTransitionTime":"2025-11-21T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.955134 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.955194 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.955212 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.955239 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:17 crc kubenswrapper[4774]: I1121 14:04:17.955260 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:17Z","lastTransitionTime":"2025-11-21T14:04:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.058680 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.058743 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.058761 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.058787 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.058806 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:18Z","lastTransitionTime":"2025-11-21T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.161797 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.161929 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.161979 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.162004 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.162055 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:18Z","lastTransitionTime":"2025-11-21T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.265270 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.265332 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.265349 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.265375 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.265393 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:18Z","lastTransitionTime":"2025-11-21T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.368346 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.368427 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.368449 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.368482 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.368506 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:18Z","lastTransitionTime":"2025-11-21T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.471122 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.471196 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.471214 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.471243 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.471266 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:18Z","lastTransitionTime":"2025-11-21T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.574042 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.574087 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.574098 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.574116 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.574127 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:18Z","lastTransitionTime":"2025-11-21T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.677954 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.678292 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.678387 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.678474 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.678574 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:18Z","lastTransitionTime":"2025-11-21T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.780855 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.781189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.781354 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.781456 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.781535 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:18Z","lastTransitionTime":"2025-11-21T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.885596 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.885688 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.885708 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.885735 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.885752 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:18Z","lastTransitionTime":"2025-11-21T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.989244 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.989306 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.989325 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.989348 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:18 crc kubenswrapper[4774]: I1121 14:04:18.989364 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:18Z","lastTransitionTime":"2025-11-21T14:04:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.092118 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.092190 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.092205 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:19 crc kubenswrapper[4774]: E1121 14:04:19.092295 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.092311 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:19 crc kubenswrapper[4774]: E1121 14:04:19.092425 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.092507 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.092534 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:19 crc kubenswrapper[4774]: E1121 14:04:19.092530 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.092545 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.092590 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.092608 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:19Z","lastTransitionTime":"2025-11-21T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:19 crc kubenswrapper[4774]: E1121 14:04:19.092630 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.196074 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.196151 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.196166 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.196191 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.196206 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:19Z","lastTransitionTime":"2025-11-21T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.299469 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.299527 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.299537 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.299558 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.299570 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:19Z","lastTransitionTime":"2025-11-21T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.402756 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.402874 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.402894 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.402934 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.402955 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:19Z","lastTransitionTime":"2025-11-21T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.506495 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.506560 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.506575 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.506601 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.506615 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:19Z","lastTransitionTime":"2025-11-21T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.610473 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.610604 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.610635 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.610663 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.610685 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:19Z","lastTransitionTime":"2025-11-21T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.714149 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.714208 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.714220 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.714243 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.714256 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:19Z","lastTransitionTime":"2025-11-21T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.817324 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.817393 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.817415 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.817446 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.817468 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:19Z","lastTransitionTime":"2025-11-21T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.920138 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.920198 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.920216 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.920242 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:19 crc kubenswrapper[4774]: I1121 14:04:19.920259 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:19Z","lastTransitionTime":"2025-11-21T14:04:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.023074 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.023145 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.023164 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.023190 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.023209 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:20Z","lastTransitionTime":"2025-11-21T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.121074 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447
235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\
\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"f
inishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.126351 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.126410 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.126433 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.126460 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.126481 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:20Z","lastTransitionTime":"2025-11-21T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.139889 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.156606 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.174651 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.196523 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.210336 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.222243 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.229520 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.229563 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.229579 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.229603 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.229622 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:20Z","lastTransitionTime":"2025-11-21T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.239939 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.253337 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.262808 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.277946 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbc
f5d44e1939ff1a336b1b00a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"n:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400027 6279 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:05.399922 6279 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 14:04:05.399871 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400560 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 14:04:05.400601 6279 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.292104 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.306541 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.319887 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.330999 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.331927 4774 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.331986 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.332005 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.332028 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.332045 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:20Z","lastTransitionTime":"2025-11-21T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.341114 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:20Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.434901 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.434962 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.434984 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.435012 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.435034 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:20Z","lastTransitionTime":"2025-11-21T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.538280 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.538445 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.538469 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.538544 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.538565 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:20Z","lastTransitionTime":"2025-11-21T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.641745 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.641871 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.641888 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.641905 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.641946 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:20Z","lastTransitionTime":"2025-11-21T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.745597 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.745672 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.745698 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.745732 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.745757 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:20Z","lastTransitionTime":"2025-11-21T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.848657 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.848715 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.848735 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.848759 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.848776 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:20Z","lastTransitionTime":"2025-11-21T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.931199 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:20 crc kubenswrapper[4774]: E1121 14:04:20.931441 4774 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:20 crc kubenswrapper[4774]: E1121 14:04:20.931558 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs podName:0d294e10-6a0e-4871-871c-01fb8e7ead03 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:36.931534296 +0000 UTC m=+67.583733565 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs") pod "network-metrics-daemon-44mbn" (UID: "0d294e10-6a0e-4871-871c-01fb8e7ead03") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.951393 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.951477 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.951502 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.951539 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:20 crc kubenswrapper[4774]: I1121 14:04:20.951577 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:20Z","lastTransitionTime":"2025-11-21T14:04:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.054599 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.054671 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.054759 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.054786 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.054908 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.092320 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.092364 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.092483 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:21 crc kubenswrapper[4774]: E1121 14:04:21.092482 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:21 crc kubenswrapper[4774]: E1121 14:04:21.092742 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.093260 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:21 crc kubenswrapper[4774]: E1121 14:04:21.093358 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:21 crc kubenswrapper[4774]: E1121 14:04:21.093424 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.093852 4774 scope.go:117] "RemoveContainer" containerID="fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.158634 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.158688 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.158706 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.158728 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.158743 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.261691 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.262097 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.262348 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.262621 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.262842 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.365441 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.365476 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.365485 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.365499 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.365508 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.462317 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/1.log" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.467411 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.467442 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.467452 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.467468 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.467479 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.468122 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1"} Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.468796 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.489860 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.505812 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.528467 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.547038 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.568192 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.570393 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.570565 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.570631 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.570718 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.570809 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.581847 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.592057 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.603212 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.614303 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.625993 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.643881 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89c
b1d9df704acb25775e1812c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"n:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400027 6279 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:05.399922 6279 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 14:04:05.399871 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400560 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 14:04:05.400601 6279 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.655562 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.664937 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.673692 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.673760 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.673780 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.673810 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.673860 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.681224 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.695680 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.713341 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.776471 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.776518 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.776531 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.776549 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.776573 4774 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.841924 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.841972 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.841982 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.841998 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.842009 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: E1121 14:04:21.855239 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.860936 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.860981 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.860996 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.861012 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.861023 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: E1121 14:04:21.876909 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.881210 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.881253 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.881264 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.881280 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.881293 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: E1121 14:04:21.897412 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.900880 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.900908 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.900916 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.900933 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.900942 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: E1121 14:04:21.933905 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.939496 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.939527 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.939536 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.939553 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.939563 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:21 crc kubenswrapper[4774]: E1121 14:04:21.957511 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:21Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:21 crc kubenswrapper[4774]: E1121 14:04:21.957674 4774 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.959325 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.959374 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.959389 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.959408 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:21 crc kubenswrapper[4774]: I1121 14:04:21.959420 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:21Z","lastTransitionTime":"2025-11-21T14:04:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.062083 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.062117 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.062130 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.062147 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.062159 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:22Z","lastTransitionTime":"2025-11-21T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.165219 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.165287 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.165303 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.165333 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.165350 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:22Z","lastTransitionTime":"2025-11-21T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.269466 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.269761 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.269778 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.269799 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.269815 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:22Z","lastTransitionTime":"2025-11-21T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.372455 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.372510 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.372527 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.372550 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.372568 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:22Z","lastTransitionTime":"2025-11-21T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.474135 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/2.log" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.474272 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.474319 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.474331 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.474351 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.474365 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:22Z","lastTransitionTime":"2025-11-21T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.475556 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/1.log" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.479893 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1" exitCode=1 Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.480154 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1"} Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.480204 4774 scope.go:117] "RemoveContainer" containerID="fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.481233 4774 scope.go:117] "RemoveContainer" containerID="3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1" Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.481431 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.502578 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.521245 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.538764 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.554735 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.571450 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.579903 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.580001 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.580015 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.580037 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.580053 4774 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:22Z","lastTransitionTime":"2025-11-21T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.593845 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.605578 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.621298 4774 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.636074 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.647890 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.664565 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.678517 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.682403 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.682461 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.682479 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.682836 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.682871 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:22Z","lastTransitionTime":"2025-11-21T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.692238 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc358257
71aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.706012 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.725278 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.746524 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89c
b1d9df704acb25775e1812c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd516d890368bb2bc0e35c6e5a434c9b36110cbcf5d44e1939ff1a336b1b00a9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"n:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400027 6279 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:05.399922 6279 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 14:04:05.399871 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1121 14:04:05.400560 6279 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:960d98b2-dc64-4e93-a4b6-9b19847af71e}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1121 14:04:05.400601 6279 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:22Z\\\",\\\"message\\\":\\\"pe:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1121 14:04:22.034523 6464 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1121 14:04:22.034383 6464 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-z77mh after 0 failed attempt(s)\\\\nI1121 14:04:22.034537 6464 default_network_controller.go:776] Recording success event on pod 
openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:22.034540 6464 lb_config.go:1031] Cluster endpoints for openshift-controller-manager-operator/metrics for network=default are: map[]\\\\nI1121 14:04:22.034552 6464 services_controller.go:443] Built service openshift-controller-manager-operator/metrics LB cluster-wide configs for network=defau\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveRead
Only\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:22Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.786301 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.786355 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.786370 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.786389 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.786402 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:22Z","lastTransitionTime":"2025-11-21T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.854224 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.854398 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:04:54.854377866 +0000 UTC m=+85.506577125 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.854433 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.854483 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.854507 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.854537 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.854596 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.854605 4774 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 
14:04:22.854614 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.854630 4774 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.854639 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.854654 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.854666 4774 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.854667 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:54.854655283 +0000 UTC m=+85.506854542 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.854695 4774 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.854707 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:54.854698545 +0000 UTC m=+85.506897804 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.854862 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-11-21 14:04:54.854852529 +0000 UTC m=+85.507051788 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:04:22 crc kubenswrapper[4774]: E1121 14:04:22.854877 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:04:54.854870099 +0000 UTC m=+85.507069358 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.889120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.889162 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.889173 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.889185 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.889194 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:22Z","lastTransitionTime":"2025-11-21T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.992353 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.992406 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.992419 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.992436 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:22 crc kubenswrapper[4774]: I1121 14:04:22.992450 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:22Z","lastTransitionTime":"2025-11-21T14:04:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.092655 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:23 crc kubenswrapper[4774]: E1121 14:04:23.092788 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.092657 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.092655 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.092676 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:23 crc kubenswrapper[4774]: E1121 14:04:23.092943 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:23 crc kubenswrapper[4774]: E1121 14:04:23.093337 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:23 crc kubenswrapper[4774]: E1121 14:04:23.093305 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.094476 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.094509 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.094519 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.094533 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.094546 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:23Z","lastTransitionTime":"2025-11-21T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.196633 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.196697 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.196710 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.196728 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.196740 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:23Z","lastTransitionTime":"2025-11-21T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.299617 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.299681 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.299695 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.299713 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.299726 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:23Z","lastTransitionTime":"2025-11-21T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.402370 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.402424 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.402435 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.402452 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.402463 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:23Z","lastTransitionTime":"2025-11-21T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.484710 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/2.log" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.487554 4774 scope.go:117] "RemoveContainer" containerID="3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1" Nov 21 14:04:23 crc kubenswrapper[4774]: E1121 14:04:23.487725 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.499955 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.504212 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.504240 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.504247 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.504261 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.504269 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:23Z","lastTransitionTime":"2025-11-21T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.512649 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.523892 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.536672 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.550256 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.564303 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.574759 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.588463 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.600082 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.606721 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.606758 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.606768 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.606784 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.606798 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:23Z","lastTransitionTime":"2025-11-21T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.612912 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.626613 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.638796 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.652174 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.666248 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.678389 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha
256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.697683 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89c
b1d9df704acb25775e1812c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:22Z\\\",\\\"message\\\":\\\"pe:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1121 14:04:22.034523 6464 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1121 14:04:22.034383 6464 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-z77mh after 0 failed attempt(s)\\\\nI1121 14:04:22.034537 6464 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:22.034540 6464 lb_config.go:1031] Cluster endpoints for openshift-controller-manager-operator/metrics for network=default are: map[]\\\\nI1121 14:04:22.034552 6464 services_controller.go:443] Built service openshift-controller-manager-operator/metrics LB cluster-wide configs for network=defau\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:23Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.709435 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.709469 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.709479 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.709502 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.709515 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:23Z","lastTransitionTime":"2025-11-21T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.811439 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.811478 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.811486 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.811499 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.811508 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:23Z","lastTransitionTime":"2025-11-21T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.914647 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.914685 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.914696 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.914737 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:23 crc kubenswrapper[4774]: I1121 14:04:23.914747 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:23Z","lastTransitionTime":"2025-11-21T14:04:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.017229 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.017436 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.017450 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.017462 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.017471 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:24Z","lastTransitionTime":"2025-11-21T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.113987 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.120238 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.120283 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.120294 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.120310 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.120323 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:24Z","lastTransitionTime":"2025-11-21T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.125261 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.136395 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89c
b1d9df704acb25775e1812c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:22Z\\\",\\\"message\\\":\\\"pe:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1121 14:04:22.034523 6464 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1121 14:04:22.034383 6464 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-z77mh after 0 failed attempt(s)\\\\nI1121 14:04:22.034537 6464 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:22.034540 6464 lb_config.go:1031] Cluster endpoints for openshift-controller-manager-operator/metrics for network=default are: map[]\\\\nI1121 14:04:22.034552 6464 services_controller.go:443] Built service openshift-controller-manager-operator/metrics LB cluster-wide configs for network=defau\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.151262 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.163968 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.175920 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.185964 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.196646 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.212700 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.222364 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.222405 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.222414 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.222430 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.222440 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:24Z","lastTransitionTime":"2025-11-21T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.227920 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.237850 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.253082 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.269210 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.284118 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.328679 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.328729 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.328744 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.328765 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.328777 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:24Z","lastTransitionTime":"2025-11-21T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.343994 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.359088 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.374845 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.392335 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:24Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.431505 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.431547 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.431560 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.431578 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.431593 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:24Z","lastTransitionTime":"2025-11-21T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.534836 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.534885 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.534898 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.534914 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.534923 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:24Z","lastTransitionTime":"2025-11-21T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.638391 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.638861 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.638930 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.638998 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.639058 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:24Z","lastTransitionTime":"2025-11-21T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.742367 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.742409 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.742425 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.742444 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.742456 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:24Z","lastTransitionTime":"2025-11-21T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.845350 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.845444 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.845470 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.845497 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.845576 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:24Z","lastTransitionTime":"2025-11-21T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.948785 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.948858 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.948870 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.948890 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:24 crc kubenswrapper[4774]: I1121 14:04:24.948900 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:24Z","lastTransitionTime":"2025-11-21T14:04:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.052019 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.052057 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.052067 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.052081 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.052090 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:25Z","lastTransitionTime":"2025-11-21T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.099243 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.099300 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.099261 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:25 crc kubenswrapper[4774]: E1121 14:04:25.099438 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:25 crc kubenswrapper[4774]: E1121 14:04:25.099526 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:25 crc kubenswrapper[4774]: E1121 14:04:25.099664 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.099769 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:25 crc kubenswrapper[4774]: E1121 14:04:25.100171 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.154831 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.154873 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.154887 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.154904 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.154919 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:25Z","lastTransitionTime":"2025-11-21T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.257936 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.257992 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.258005 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.258022 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.258060 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:25Z","lastTransitionTime":"2025-11-21T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.360915 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.361165 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.361229 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.361315 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.361394 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:25Z","lastTransitionTime":"2025-11-21T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.464449 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.464518 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.464529 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.464550 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.464560 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:25Z","lastTransitionTime":"2025-11-21T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.567165 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.567240 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.567264 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.567300 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.567323 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:25Z","lastTransitionTime":"2025-11-21T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.669988 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.670034 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.670047 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.670068 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.670084 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:25Z","lastTransitionTime":"2025-11-21T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.772981 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.773026 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.773042 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.773063 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.773079 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:25Z","lastTransitionTime":"2025-11-21T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.876252 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.876309 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.876325 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.876348 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.876362 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:25Z","lastTransitionTime":"2025-11-21T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.978945 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.978984 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.978993 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.979009 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:25 crc kubenswrapper[4774]: I1121 14:04:25.979020 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:25Z","lastTransitionTime":"2025-11-21T14:04:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.081991 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.082033 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.082047 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.082071 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.082085 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:26Z","lastTransitionTime":"2025-11-21T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.184883 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.184951 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.184963 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.184983 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.184996 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:26Z","lastTransitionTime":"2025-11-21T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.288605 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.288670 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.288681 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.288700 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.288712 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:26Z","lastTransitionTime":"2025-11-21T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.391744 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.391802 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.391846 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.391871 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.391884 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:26Z","lastTransitionTime":"2025-11-21T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.494484 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.494558 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.494575 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.494601 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.494618 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:26Z","lastTransitionTime":"2025-11-21T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.598322 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.598402 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.598421 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.598447 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.598465 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:26Z","lastTransitionTime":"2025-11-21T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.701202 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.701281 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.701306 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.701375 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.701394 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:26Z","lastTransitionTime":"2025-11-21T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.804455 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.804518 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.804553 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.804588 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.804610 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:26Z","lastTransitionTime":"2025-11-21T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.907197 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.907232 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.907243 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.907261 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:26 crc kubenswrapper[4774]: I1121 14:04:26.907273 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:26Z","lastTransitionTime":"2025-11-21T14:04:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.009958 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.010012 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.010027 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.010048 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.010064 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:27Z","lastTransitionTime":"2025-11-21T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.093027 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.093102 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.093220 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:04:27 crc kubenswrapper[4774]: E1121 14:04:27.093267 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.093055 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:04:27 crc kubenswrapper[4774]: E1121 14:04:27.093462 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:04:27 crc kubenswrapper[4774]: E1121 14:04:27.093592 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:04:27 crc kubenswrapper[4774]: E1121 14:04:27.093757 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.111740 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.111789 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.111806 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.111862 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.111883 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:27Z","lastTransitionTime":"2025-11-21T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.217127 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.217211 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.217254 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.217289 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.217314 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:27Z","lastTransitionTime":"2025-11-21T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.320665 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.320727 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.320745 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.320772 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.320792 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:27Z","lastTransitionTime":"2025-11-21T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.423729 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.423805 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.423837 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.423870 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.423882 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:27Z","lastTransitionTime":"2025-11-21T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.526900 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.526970 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.526994 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.527025 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.527050 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:27Z","lastTransitionTime":"2025-11-21T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.630117 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.630185 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.630203 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.630234 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.630254 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:27Z","lastTransitionTime":"2025-11-21T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.735065 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.735159 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.735178 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.735211 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.735227 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:27Z","lastTransitionTime":"2025-11-21T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
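[Editor's note] The five-entry cycle above (three NodeHasSufficient*/NodeHasNoDiskPressure events, NodeNotReady, then the recorded Ready=False condition) repeats roughly every 100 ms for as long as the CNI config is missing. A small illustrative sketch for measuring that cadence from a saved copy of this log; the kubelet.log path and the hard-coded year (klog timestamps omit it) are assumptions.

#!/usr/bin/env python3
# Illustrative sketch: count the "Node became not ready" conditions recorded
# above and measure the gap between consecutive ones.
import re
from datetime import datetime, timedelta

# Matches the klog prefix, e.g. I1121 14:04:27.735227, on the setters lines.
PATTERN = re.compile(
    r'[IE](\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6}).*"Node became not ready"')

def notready_times(path: str = "kubelet.log") -> list[datetime]:
    times = []
    with open(path, errors="replace") as fh:
        for line in fh:
            m = PATTERN.search(line)
            if m:
                # klog prints MMDD plus wall-clock time; the year is assumed.
                stamp = datetime.strptime(f"2025{m.group(1)} {m.group(2)}",
                                          "%Y%m%d %H:%M:%S.%f")
                times.append(stamp)
    return times

if __name__ == "__main__":
    ts = notready_times()
    print(f"{len(ts)} NotReady conditions recorded")
    gaps = [b - a for a, b in zip(ts, ts[1:])]
    if gaps:
        print("mean gap:", sum(gaps, timedelta()) / len(gaps))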
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.818756 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.837797 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.837907 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.837927 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.837958 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.837976 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:27Z","lastTransitionTime":"2025-11-21T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.844940 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-op
erator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:27Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.860382 4774 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:27Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.875180 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:27Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.888631 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:27Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.902253 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:27Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.914234 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:27Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.925937 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:27Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.940009 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:27Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.941876 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.941931 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.941975 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.941997 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.942007 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:27Z","lastTransitionTime":"2025-11-21T14:04:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.956017 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0
c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:27Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.969680 4774 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:27Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.983926 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:27Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:27 crc kubenswrapper[4774]: I1121 14:04:27.996532 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"747d6ab8-8aae-4fac-bbc7-edb08b7a89e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54b142cdc4f369e06858bf91232a83c7333d45855700ce01defff35389225b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bf4070c52d0efcd83b293dd8102c3dc91563a0893f73ef9664ebc999f38a97e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e6a8f53660e5ab6ddadd0ae985773b591a8b466906a611e8e81aca7dfd8b50b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:27Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.009073 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:28Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.022259 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:28Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.035115 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:28Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.044061 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.044099 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.044111 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.044129 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.044139 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:28Z","lastTransitionTime":"2025-11-21T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.049255 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubel
et\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:28Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.068767 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89c
b1d9df704acb25775e1812c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:22Z\\\",\\\"message\\\":\\\"pe:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1121 14:04:22.034523 6464 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1121 14:04:22.034383 6464 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-z77mh after 0 failed attempt(s)\\\\nI1121 14:04:22.034537 6464 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:22.034540 6464 lb_config.go:1031] Cluster endpoints for openshift-controller-manager-operator/metrics for network=default are: map[]\\\\nI1121 14:04:22.034552 6464 services_controller.go:443] Built service openshift-controller-manager-operator/metrics LB cluster-wide configs for network=defau\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:28Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.147300 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.147346 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.147358 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.147380 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.147397 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:28Z","lastTransitionTime":"2025-11-21T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.250873 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.250922 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.250933 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.250950 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.250962 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:28Z","lastTransitionTime":"2025-11-21T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.354308 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.354352 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.354361 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.354377 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.354387 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:28Z","lastTransitionTime":"2025-11-21T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.457311 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.457620 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.457768 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.457902 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.457986 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:28Z","lastTransitionTime":"2025-11-21T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.562113 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.562154 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.562164 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.562178 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.562191 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:28Z","lastTransitionTime":"2025-11-21T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.665289 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.665361 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.665381 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.665408 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.665426 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:28Z","lastTransitionTime":"2025-11-21T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.772239 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.772292 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.772350 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.772372 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.772387 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:28Z","lastTransitionTime":"2025-11-21T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.875254 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.875328 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.875353 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.875381 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.875404 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:28Z","lastTransitionTime":"2025-11-21T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.978379 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.978477 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.978503 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.978532 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:28 crc kubenswrapper[4774]: I1121 14:04:28.978550 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:28Z","lastTransitionTime":"2025-11-21T14:04:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.080858 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.080893 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.080902 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.080915 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.080923 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:29Z","lastTransitionTime":"2025-11-21T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.092431 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.092445 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.092607 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:29 crc kubenswrapper[4774]: E1121 14:04:29.092616 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.092574 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:29 crc kubenswrapper[4774]: E1121 14:04:29.092716 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:29 crc kubenswrapper[4774]: E1121 14:04:29.092872 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:29 crc kubenswrapper[4774]: E1121 14:04:29.093143 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.184253 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.184297 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.184308 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.184328 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.184338 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:29Z","lastTransitionTime":"2025-11-21T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.289363 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.289413 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.289428 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.289448 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.289462 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:29Z","lastTransitionTime":"2025-11-21T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.391417 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.391482 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.391503 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.391525 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.391542 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:29Z","lastTransitionTime":"2025-11-21T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.493877 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.493912 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.493921 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.493935 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.493943 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:29Z","lastTransitionTime":"2025-11-21T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.596181 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.596230 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.596244 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.596263 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.596277 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:29Z","lastTransitionTime":"2025-11-21T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.699134 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.699176 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.699189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.699210 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.699225 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:29Z","lastTransitionTime":"2025-11-21T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.802220 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.802299 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.802321 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.802353 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.802376 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:29Z","lastTransitionTime":"2025-11-21T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.905996 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.906050 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.906063 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.906087 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:29 crc kubenswrapper[4774]: I1121 14:04:29.906099 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:29Z","lastTransitionTime":"2025-11-21T14:04:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.008386 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.008470 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.008492 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.008520 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.008537 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:30Z","lastTransitionTime":"2025-11-21T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.111718 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.111750 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.111760 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.111774 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.111784 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:30Z","lastTransitionTime":"2025-11-21T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.128415 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:22Z\\\",\\\"message\\\":\\\"pe:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1121 14:04:22.034523 6464 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1121 14:04:22.034383 6464 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-z77mh after 0 failed attempt(s)\\\\nI1121 14:04:22.034537 6464 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:22.034540 6464 lb_config.go:1031] Cluster endpoints for openshift-controller-manager-operator/metrics for network=default are: map[]\\\\nI1121 14:04:22.034552 6464 services_controller.go:443] Built service openshift-controller-manager-operator/metrics LB cluster-wide configs for network=defau\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.146655 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.164520 4774 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.179440 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.192421 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.203730 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.213700 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.213743 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.213757 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.213778 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.213793 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:30Z","lastTransitionTime":"2025-11-21T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.220526 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z"
Nov 21 14:04:30 crc kubenswrapper[4774]: I1121
14:04:30.234760 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.245089 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.259011 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.269802 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.285534 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.300669 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"747d6ab8-8aae-4fac-bbc7-edb08b7a89e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54b142cdc4f369e06858bf91232a83c7333d45855700ce01defff35389225b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bf4070c52d0efcd83b293dd8102c3dc91563a0893f73ef9664ebc999f38a97e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e6a8f53660e5ab6ddadd0ae985773b591a8b466906a611e8e81aca7dfd8b50b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.316101 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.316127 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.316136 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.316149 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 
14:04:30.316158 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:30Z","lastTransitionTime":"2025-11-21T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.317969 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.332710 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.347656 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.367486 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:30Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.418718 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.419138 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.419211 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.419244 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.419263 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:30Z","lastTransitionTime":"2025-11-21T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.521016 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.521054 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.521064 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.521080 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.521094 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:30Z","lastTransitionTime":"2025-11-21T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.623955 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.624031 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.624049 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.624071 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.624087 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:30Z","lastTransitionTime":"2025-11-21T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.726252 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.726288 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.726296 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.726309 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.726320 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:30Z","lastTransitionTime":"2025-11-21T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.829576 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.829637 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.829654 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.829678 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.829696 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:30Z","lastTransitionTime":"2025-11-21T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.934052 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.935599 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.935686 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.935770 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:30 crc kubenswrapper[4774]: I1121 14:04:30.935884 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:30Z","lastTransitionTime":"2025-11-21T14:04:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.038655 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.038700 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.038715 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.038732 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.038744 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:31Z","lastTransitionTime":"2025-11-21T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.093029 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:31 crc kubenswrapper[4774]: E1121 14:04:31.093205 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.093298 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:31 crc kubenswrapper[4774]: E1121 14:04:31.093382 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.093465 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:31 crc kubenswrapper[4774]: E1121 14:04:31.093558 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.093624 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:31 crc kubenswrapper[4774]: E1121 14:04:31.093704 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.141744 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.141797 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.141809 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.141848 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.141860 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:31Z","lastTransitionTime":"2025-11-21T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.247509 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.247581 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.247602 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.247626 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.247645 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:31Z","lastTransitionTime":"2025-11-21T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.351331 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.351387 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.351405 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.351430 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.351455 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:31Z","lastTransitionTime":"2025-11-21T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.453359 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.453399 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.453411 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.453428 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.453438 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:31Z","lastTransitionTime":"2025-11-21T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.555849 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.555901 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.555910 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.555931 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.555942 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:31Z","lastTransitionTime":"2025-11-21T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.658455 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.658493 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.658503 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.658518 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.658528 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:31Z","lastTransitionTime":"2025-11-21T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.761026 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.761083 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.761098 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.761120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.761137 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:31Z","lastTransitionTime":"2025-11-21T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.863397 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.863450 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.863466 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.863481 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.863490 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:31Z","lastTransitionTime":"2025-11-21T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.966363 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.966428 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.966444 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.966463 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:31 crc kubenswrapper[4774]: I1121 14:04:31.966475 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:31Z","lastTransitionTime":"2025-11-21T14:04:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.069630 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.069680 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.069690 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.069705 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.069714 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.171783 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.171839 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.171853 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.171869 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.171879 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.274364 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.274424 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.274435 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.274450 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.274462 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.339967 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.340039 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.340057 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.340087 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.340104 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: E1121 14:04:32.362314 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:32Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.367754 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.368070 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.368215 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.368349 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.368478 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: E1121 14:04:32.390501 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:32Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.395116 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.395262 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.395351 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.395452 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.395543 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: E1121 14:04:32.415064 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:32Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.419853 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.419920 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.419934 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.419953 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.419965 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: E1121 14:04:32.433011 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:32Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.437216 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.437557 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.437632 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.437696 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.437759 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: E1121 14:04:32.450222 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:32Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:32 crc kubenswrapper[4774]: E1121 14:04:32.450382 4774 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.452034 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.452073 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.452082 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.452100 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.452110 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.554047 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.554094 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.554107 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.554129 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.554142 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.659286 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.659573 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.659662 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.659743 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.659867 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.762317 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.762390 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.762405 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.762425 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.762436 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.865640 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.865695 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.865705 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.865719 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.865729 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.968518 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.968591 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.968600 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.968615 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:32 crc kubenswrapper[4774]: I1121 14:04:32.968625 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:32Z","lastTransitionTime":"2025-11-21T14:04:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.072193 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.072240 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.072251 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.072271 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.072286 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:33Z","lastTransitionTime":"2025-11-21T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.092772 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.092808 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:33 crc kubenswrapper[4774]: E1121 14:04:33.093064 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.092847 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:33 crc kubenswrapper[4774]: E1121 14:04:33.093182 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:33 crc kubenswrapper[4774]: E1121 14:04:33.093247 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.094002 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:33 crc kubenswrapper[4774]: E1121 14:04:33.094302 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.174782 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.174883 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.174898 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.174919 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.174934 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:33Z","lastTransitionTime":"2025-11-21T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.278425 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.278468 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.278482 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.278502 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.278513 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:33Z","lastTransitionTime":"2025-11-21T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.380912 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.381012 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.381023 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.381038 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.381050 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:33Z","lastTransitionTime":"2025-11-21T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.486664 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.486751 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.486770 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.486868 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.486887 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:33Z","lastTransitionTime":"2025-11-21T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.589860 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.589916 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.589934 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.589984 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.590002 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:33Z","lastTransitionTime":"2025-11-21T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.693082 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.693141 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.693156 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.693177 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.693191 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:33Z","lastTransitionTime":"2025-11-21T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.796225 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.796275 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.796290 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.796308 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.796334 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:33Z","lastTransitionTime":"2025-11-21T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.899462 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.899532 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.899556 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.899587 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:33 crc kubenswrapper[4774]: I1121 14:04:33.899606 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:33Z","lastTransitionTime":"2025-11-21T14:04:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.002093 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.002136 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.002147 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.002195 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.002227 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:34Z","lastTransitionTime":"2025-11-21T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.094165 4774 scope.go:117] "RemoveContainer" containerID="3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1" Nov 21 14:04:34 crc kubenswrapper[4774]: E1121 14:04:34.094313 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.103783 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.103844 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.103863 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.103881 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.103894 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:34Z","lastTransitionTime":"2025-11-21T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.206254 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.206310 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.206323 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.206341 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.206706 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:34Z","lastTransitionTime":"2025-11-21T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.309446 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.309482 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.309494 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.309510 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.309521 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:34Z","lastTransitionTime":"2025-11-21T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.411790 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.411850 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.411863 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.411880 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.411892 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:34Z","lastTransitionTime":"2025-11-21T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.515132 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.515176 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.515186 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.515205 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.515215 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:34Z","lastTransitionTime":"2025-11-21T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.618525 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.618581 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.618598 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.618630 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.618648 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:34Z","lastTransitionTime":"2025-11-21T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.720960 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.721005 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.721020 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.721038 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.721053 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:34Z","lastTransitionTime":"2025-11-21T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.823101 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.823145 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.823159 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.823174 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.823184 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:34Z","lastTransitionTime":"2025-11-21T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.925790 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.925838 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.925851 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.925867 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:34 crc kubenswrapper[4774]: I1121 14:04:34.925877 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:34Z","lastTransitionTime":"2025-11-21T14:04:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.030462 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.030543 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.030561 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.030595 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.030619 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:35Z","lastTransitionTime":"2025-11-21T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.092893 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:35 crc kubenswrapper[4774]: E1121 14:04:35.093026 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.093062 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.093127 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.093212 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:35 crc kubenswrapper[4774]: E1121 14:04:35.093263 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:35 crc kubenswrapper[4774]: E1121 14:04:35.093365 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:35 crc kubenswrapper[4774]: E1121 14:04:35.093498 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.132943 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.132983 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.132992 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.133007 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.133018 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:35Z","lastTransitionTime":"2025-11-21T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.236422 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.236462 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.236471 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.236484 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.236497 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:35Z","lastTransitionTime":"2025-11-21T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.338625 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.338663 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.338674 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.338690 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.338701 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:35Z","lastTransitionTime":"2025-11-21T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.442489 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.442534 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.442544 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.442561 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.442572 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:35Z","lastTransitionTime":"2025-11-21T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.544071 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.544104 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.544115 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.544131 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.544142 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:35Z","lastTransitionTime":"2025-11-21T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.647319 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.647380 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.647391 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.647405 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.647415 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:35Z","lastTransitionTime":"2025-11-21T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.750011 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.750057 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.750067 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.750082 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.750093 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:35Z","lastTransitionTime":"2025-11-21T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.852778 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.852835 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.852848 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.852880 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.852891 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:35Z","lastTransitionTime":"2025-11-21T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.955632 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.955686 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.955707 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.955730 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:35 crc kubenswrapper[4774]: I1121 14:04:35.955747 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:35Z","lastTransitionTime":"2025-11-21T14:04:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.058355 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.058398 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.058408 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.058430 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.058439 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:36Z","lastTransitionTime":"2025-11-21T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.160621 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.160672 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.160694 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.160752 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.160770 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:36Z","lastTransitionTime":"2025-11-21T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.263637 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.263679 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.263693 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.263711 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.263723 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:36Z","lastTransitionTime":"2025-11-21T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.366563 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.366617 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.366630 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.366654 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.366669 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:36Z","lastTransitionTime":"2025-11-21T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.469157 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.469202 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.469212 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.469229 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.469239 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:36Z","lastTransitionTime":"2025-11-21T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.571633 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.571702 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.571721 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.571746 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.571766 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:36Z","lastTransitionTime":"2025-11-21T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.674143 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.674180 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.674189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.674204 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.674215 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:36Z","lastTransitionTime":"2025-11-21T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.775991 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.776027 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.776037 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.776050 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.776062 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:36Z","lastTransitionTime":"2025-11-21T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.878119 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.878152 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.878161 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.878173 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.878182 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:36Z","lastTransitionTime":"2025-11-21T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.980985 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.981024 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.981033 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.981048 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:36 crc kubenswrapper[4774]: I1121 14:04:36.981060 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:36Z","lastTransitionTime":"2025-11-21T14:04:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.006657 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:37 crc kubenswrapper[4774]: E1121 14:04:37.006794 4774 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:37 crc kubenswrapper[4774]: E1121 14:04:37.006894 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs podName:0d294e10-6a0e-4871-871c-01fb8e7ead03 nodeName:}" failed. No retries permitted until 2025-11-21 14:05:09.006872522 +0000 UTC m=+99.659071781 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs") pod "network-metrics-daemon-44mbn" (UID: "0d294e10-6a0e-4871-871c-01fb8e7ead03") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.083417 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.083456 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.083466 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.083479 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.083490 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:37Z","lastTransitionTime":"2025-11-21T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.092720 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.092776 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.092718 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:37 crc kubenswrapper[4774]: E1121 14:04:37.092845 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:37 crc kubenswrapper[4774]: E1121 14:04:37.092903 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.092777 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:37 crc kubenswrapper[4774]: E1121 14:04:37.092972 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:37 crc kubenswrapper[4774]: E1121 14:04:37.093016 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.187381 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.187503 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.187525 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.187554 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.187579 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:37Z","lastTransitionTime":"2025-11-21T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.290136 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.290174 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.290183 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.290198 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.290208 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:37Z","lastTransitionTime":"2025-11-21T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.392531 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.392905 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.392991 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.393096 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.393184 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:37Z","lastTransitionTime":"2025-11-21T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.495078 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.495121 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.495133 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.495152 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.495164 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:37Z","lastTransitionTime":"2025-11-21T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.597671 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.597976 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.598060 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.598144 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.598218 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:37Z","lastTransitionTime":"2025-11-21T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.701038 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.701074 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.701084 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.701100 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.701110 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:37Z","lastTransitionTime":"2025-11-21T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.803875 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.803980 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.803998 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.804025 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.804047 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:37Z","lastTransitionTime":"2025-11-21T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.907931 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.908008 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.908026 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.908049 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:37 crc kubenswrapper[4774]: I1121 14:04:37.908063 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:37Z","lastTransitionTime":"2025-11-21T14:04:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.009977 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.010290 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.010446 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.010554 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.010652 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:38Z","lastTransitionTime":"2025-11-21T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.112916 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.113461 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.113567 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.113644 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.113700 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:38Z","lastTransitionTime":"2025-11-21T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.216534 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.216575 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.216588 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.216604 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.216616 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:38Z","lastTransitionTime":"2025-11-21T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.318523 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.318597 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.318621 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.318652 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.318674 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:38Z","lastTransitionTime":"2025-11-21T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.420722 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.420756 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.420763 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.420777 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.420786 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:38Z","lastTransitionTime":"2025-11-21T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.522787 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.523123 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.523220 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.523317 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.523412 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:38Z","lastTransitionTime":"2025-11-21T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.625985 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.626020 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.626028 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.626041 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.626050 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:38Z","lastTransitionTime":"2025-11-21T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.728604 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.728645 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.728655 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.728670 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.728683 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:38Z","lastTransitionTime":"2025-11-21T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.830937 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.830966 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.830977 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.830992 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.831002 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:38Z","lastTransitionTime":"2025-11-21T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.933441 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.933508 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.933521 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.933534 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:38 crc kubenswrapper[4774]: I1121 14:04:38.933545 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:38Z","lastTransitionTime":"2025-11-21T14:04:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.035753 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.035799 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.035810 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.035854 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.035867 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:39Z","lastTransitionTime":"2025-11-21T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.092660 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.092735 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.092863 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.092914 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:39 crc kubenswrapper[4774]: E1121 14:04:39.093081 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:39 crc kubenswrapper[4774]: E1121 14:04:39.093147 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:39 crc kubenswrapper[4774]: E1121 14:04:39.093218 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:39 crc kubenswrapper[4774]: E1121 14:04:39.093286 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.138284 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.138336 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.138347 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.138373 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.138386 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:39Z","lastTransitionTime":"2025-11-21T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.240804 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.240913 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.240930 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.240955 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.240972 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:39Z","lastTransitionTime":"2025-11-21T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.349323 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.349382 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.349395 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.349413 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.349425 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:39Z","lastTransitionTime":"2025-11-21T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.452073 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.452189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.452203 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.452221 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.452233 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:39Z","lastTransitionTime":"2025-11-21T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.539763 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hdxzw_0bf8b868-6e71-4073-a9ad-e2ac8ae15215/kube-multus/0.log" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.539814 4774 generic.go:334] "Generic (PLEG): container finished" podID="0bf8b868-6e71-4073-a9ad-e2ac8ae15215" containerID="258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76" exitCode=1 Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.539871 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hdxzw" event={"ID":"0bf8b868-6e71-4073-a9ad-e2ac8ae15215","Type":"ContainerDied","Data":"258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76"} Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.540271 4774 scope.go:117] "RemoveContainer" containerID="258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.554402 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.555697 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 
14:04:39.555749 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.555764 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.555781 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.555793 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:39Z","lastTransitionTime":"2025-11-21T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.568752 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"en
v-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.584135 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.622422 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1
688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.646177 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.658189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.658221 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.658229 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.658246 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.658254 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:39Z","lastTransitionTime":"2025-11-21T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.680035 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.694242 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"747d6ab8-8aae-4fac-bbc7-edb08b7a89e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54b142cdc4f369e06858bf91232a83c7333d45855700ce01defff35389225b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bf4070c52d0efcd83b293dd8102c3dc91563a0893f73ef9664ebc999f38a97e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e6a8f53660e5ab6ddadd0ae985773b591a8b466906a611e8e81aca7dfd8b50b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.706233 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.719412 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.732528 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.745634 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:38Z\\\",\\\"message\\\":\\\"2025-11-21T14:03:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6\\\\n2025-11-21T14:03:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6 to /host/opt/cni/bin/\\\\n2025-11-21T14:03:53Z [verbose] multus-daemon started\\\\n2025-11-21T14:03:53Z [verbose] Readiness Indicator file check\\\\n2025-11-21T14:04:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.761065 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.761094 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.761103 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.761116 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.761127 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:39Z","lastTransitionTime":"2025-11-21T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.767506 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:22Z\\\",\\\"message\\\":\\\"pe:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1121 14:04:22.034523 6464 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1121 14:04:22.034383 6464 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-z77mh after 0 failed attempt(s)\\\\nI1121 14:04:22.034537 6464 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:22.034540 6464 lb_config.go:1031] Cluster endpoints for openshift-controller-manager-operator/metrics for network=default are: map[]\\\\nI1121 14:04:22.034552 6464 services_controller.go:443] Built service openshift-controller-manager-operator/metrics LB cluster-wide configs for network=defau\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.785148 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.799513 4774 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.810349 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.820140 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.832212 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:39Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.863104 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.863158 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.863168 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.863184 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.863194 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:39Z","lastTransitionTime":"2025-11-21T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.966353 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.966406 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.966419 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.966436 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:39 crc kubenswrapper[4774]: I1121 14:04:39.966452 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:39Z","lastTransitionTime":"2025-11-21T14:04:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.069524 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.069573 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.069585 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.069603 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.069614 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:40Z","lastTransitionTime":"2025-11-21T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.109147 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.122566 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.133657 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.144651 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.157532 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.171907 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.171957 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.171974 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.171999 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.172011 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:40Z","lastTransitionTime":"2025-11-21T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.174937 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 
14:04:40.194477 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.208125 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.224920 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.237429 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.253103 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.266531 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"747d6ab8-8aae-4fac-bbc7-edb08b7a89e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54b142cdc4f369e06858bf91232a83c7333d45855700ce01defff35389225b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bf4070c52d0efcd83b293dd8102c3dc91563a0893f73ef9664ebc999f38a97e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e6a8f53660e5ab6ddadd0ae985773b591a8b466906a611e8e81aca7dfd8b50b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.274778 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.274957 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.275028 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.275100 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 
14:04:40.275160 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:40Z","lastTransitionTime":"2025-11-21T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.277794 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.289147 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.301676 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.314016 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:39Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:38Z\\\",\\\"message\\\":\\\"2025-11-21T14:03:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6\\\\n2025-11-21T14:03:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6 to /host/opt/cni/bin/\\\\n2025-11-21T14:03:53Z [verbose] multus-daemon started\\\\n2025-11-21T14:03:53Z [verbose] Readiness Indicator file check\\\\n2025-11-21T14:04:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.334104 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:22Z\\\",\\\"message\\\":\\\"pe:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1121 14:04:22.034523 6464 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1121 14:04:22.034383 6464 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-z77mh after 0 failed attempt(s)\\\\nI1121 14:04:22.034537 6464 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:22.034540 6464 lb_config.go:1031] Cluster endpoints for openshift-controller-manager-operator/metrics for network=default are: map[]\\\\nI1121 14:04:22.034552 6464 services_controller.go:443] Built service openshift-controller-manager-operator/metrics LB cluster-wide configs for network=defau\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.377844 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.377892 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.377904 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.377926 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.377940 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:40Z","lastTransitionTime":"2025-11-21T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.480661 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.480696 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.480707 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.480722 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.480731 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:40Z","lastTransitionTime":"2025-11-21T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.544281 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hdxzw_0bf8b868-6e71-4073-a9ad-e2ac8ae15215/kube-multus/0.log" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.544338 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hdxzw" event={"ID":"0bf8b868-6e71-4073-a9ad-e2ac8ae15215","Type":"ContainerStarted","Data":"2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8"} Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.560091 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:38Z\\\",\\\"message\\\":\\\"2025-11-21T14:03:53+00:00 [cnibincopy] 
Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6\\\\n2025-11-21T14:03:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6 to /host/opt/cni/bin/\\\\n2025-11-21T14:03:53Z [verbose] multus-daemon started\\\\n2025-11-21T14:03:53Z [verbose] Readiness Indicator file check\\\\n2025-11-21T14:04:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.573401 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.585571 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.585604 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.585613 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.585627 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.585637 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:40Z","lastTransitionTime":"2025-11-21T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.586134 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"747d6ab8-8aae-4fac-bbc7-edb08b7a89e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54b142cdc4f369e06858bf91232a83c7333d45855700ce01defff35389225b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bf4070c52d0efcd83b293dd8102c3dc91563a0893f73ef9664ebc999f38a97e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e6a8f53660e5ab6ddadd0ae985773b591a8b466906a611e8e81aca7dfd8b50b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.598522 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.609459 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.621408 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.639154 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:22Z\\\",\\\"message\\\":\\\"pe:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1121 14:04:22.034523 6464 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1121 14:04:22.034383 6464 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-z77mh after 0 failed attempt(s)\\\\nI1121 14:04:22.034537 6464 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:22.034540 6464 lb_config.go:1031] Cluster endpoints for openshift-controller-manager-operator/metrics for network=default are: map[]\\\\nI1121 14:04:22.034552 6464 services_controller.go:443] Built service openshift-controller-manager-operator/metrics LB cluster-wide configs for network=defau\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.655650 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.666970 4774 status_manager.go:875] "Failed to update status 
for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.677097 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.686392 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.687775 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.687810 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.687835 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.687850 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.687865 4774 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:40Z","lastTransitionTime":"2025-11-21T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.696264 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.707056 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.719767 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.730957 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.741224 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.756103 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:40Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.789568 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.789594 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:40 crc 
kubenswrapper[4774]: I1121 14:04:40.789605 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.789619 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.789631 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:40Z","lastTransitionTime":"2025-11-21T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.894299 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.894345 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.894357 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.894370 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.894379 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:40Z","lastTransitionTime":"2025-11-21T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.997143 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.997192 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.997205 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.997222 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:40 crc kubenswrapper[4774]: I1121 14:04:40.997235 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:40Z","lastTransitionTime":"2025-11-21T14:04:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.093000 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:41 crc kubenswrapper[4774]: E1121 14:04:41.093404 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.093119 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:41 crc kubenswrapper[4774]: E1121 14:04:41.093592 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.093127 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:41 crc kubenswrapper[4774]: E1121 14:04:41.093756 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.093109 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:41 crc kubenswrapper[4774]: E1121 14:04:41.093955 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.099917 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.099962 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.099972 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.099986 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.099997 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:41Z","lastTransitionTime":"2025-11-21T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.202170 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.202208 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.202222 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.202239 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.202251 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:41Z","lastTransitionTime":"2025-11-21T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.304422 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.304460 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.304468 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.304481 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.304490 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:41Z","lastTransitionTime":"2025-11-21T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.407689 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.407740 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.407755 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.407776 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.407792 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:41Z","lastTransitionTime":"2025-11-21T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.509970 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.510007 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.510017 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.510031 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.510042 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:41Z","lastTransitionTime":"2025-11-21T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.612675 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.612710 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.612718 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.612733 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.612742 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:41Z","lastTransitionTime":"2025-11-21T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.714933 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.714973 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.714981 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.714994 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.715005 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:41Z","lastTransitionTime":"2025-11-21T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.830809 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.830902 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.830922 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.830947 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.830963 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:41Z","lastTransitionTime":"2025-11-21T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.933120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.933159 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.933169 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.933182 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:41 crc kubenswrapper[4774]: I1121 14:04:41.933191 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:41Z","lastTransitionTime":"2025-11-21T14:04:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.035469 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.035536 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.035552 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.035570 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.035582 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.137231 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.137275 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.137284 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.137301 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.137312 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.240120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.240196 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.240212 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.240236 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.240251 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.343160 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.343208 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.343219 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.343235 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.343248 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.446312 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.446406 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.446420 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.446444 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.446455 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.549661 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.549703 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.549715 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.549733 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.549746 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.651632 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.651672 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.651682 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.651703 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.651719 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.754215 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.754257 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.754269 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.754285 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.754296 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.766967 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.766995 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.767003 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.767014 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.767022 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:42 crc kubenswrapper[4774]: E1121 14:04:42.786456 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:42Z is after 
2025-08-24T17:21:41Z" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.791015 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.791060 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.791076 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.791092 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.791101 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:42 crc kubenswrapper[4774]: E1121 14:04:42.802158 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:42Z is after 
2025-08-24T17:21:41Z"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.810769 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.810811 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.810847 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.810864 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.810875 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:42 crc kubenswrapper[4774]: E1121 14:04:42.825757 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:42Z is after 2025-08-24T17:21:41Z"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.830563 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.830762 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.830786 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.830806 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.830845 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:42 crc kubenswrapper[4774]: E1121 14:04:42.842753 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:42Z is after 2025-08-24T17:21:41Z"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.846307 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.846360 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.846374 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.846394 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.846405 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:42 crc kubenswrapper[4774]: E1121 14:04:42.861862 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:42Z is after 2025-08-24T17:21:41Z"
Nov 21 14:04:42 crc kubenswrapper[4774]: E1121 14:04:42.862053 4774 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.864784 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.864860 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.864875 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.864898 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.864914 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.967969 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.968340 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.968352 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.968369 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:42 crc kubenswrapper[4774]: I1121 14:04:42.968380 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:42Z","lastTransitionTime":"2025-11-21T14:04:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
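Every "will retry" attempt above fails identically until the kubelet gives up with "update node status exceeds retry count": the node-status patch is rejected because the serving certificate of the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, months before the node's current clock (2025-11-21T14:04:42Z). A minimal Go sketch for confirming the certificate's validity window from the node itself; the endpoint address is taken from the log, everything else is illustrative:

package main

import (
	"crypto/tls"
	"fmt"
	"log"
)

func main() {
	// Address taken from the failing webhook call in the log above.
	const addr = "127.0.0.1:9743"
	// InsecureSkipVerify is deliberate: an expired certificate would otherwise
	// abort the handshake before we could inspect it. Nothing is trusted here;
	// we only read the validity window.
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial %s: %v", addr, err)
	}
	defer conn.Close()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%q notBefore=%s notAfter=%s\n",
			cert.Subject.String(), cert.NotBefore.UTC(), cert.NotAfter.UTC())
	}
}

If openssl is available on the host, an equivalent one-liner would be: openssl s_client -connect 127.0.0.1:9743 </dev/null 2>/dev/null | openssl x509 -noout -dates.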
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.070781 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.070808 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.070832 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.070845 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.070855 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:43Z","lastTransitionTime":"2025-11-21T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.092229 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.092271 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.092324 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:04:43 crc kubenswrapper[4774]: E1121 14:04:43.092347 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.092330 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:04:43 crc kubenswrapper[4774]: E1121 14:04:43.092444 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:04:43 crc kubenswrapper[4774]: E1121 14:04:43.092537 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:04:43 crc kubenswrapper[4774]: E1121 14:04:43.092613 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.173749 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.173787 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.173798 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.173849 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.173863 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:43Z","lastTransitionTime":"2025-11-21T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.276361 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.276395 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.276404 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.276417 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.276427 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:43Z","lastTransitionTime":"2025-11-21T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
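The node is also held NotReady for an independent reason: the container runtime reports NetworkReady=false because there is no CNI configuration file in /etc/kubernetes/cni/net.d/, so the sandboxes for network-check-source, network-check-target, networking-console-plugin, and network-metrics-daemon above cannot be created. A minimal sketch of that check, assuming (as with the stock libcni loader) that a *.conf, *.conflist, or *.json file in the directory counts as a usable configuration:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory named in the NetworkReady error above.
	const cniConfDir = "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(cniConfDir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", cniConfDir, err)
		return
	}
	found := false
	for _, e := range entries {
		// Assumption: the usual libcni extensions count as CNI configuration.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("CNI config present:", filepath.Join(cniConfDir, e.Name()))
			found = true
		}
	}
	if !found {
		fmt.Println("no CNI configuration file found; the runtime will keep reporting NetworkReady=false")
	}
}

On OpenShift this configuration file is normally written by the network operator's daemons once they start, which is what the "Has your network provider started?" hint in the message refers to.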
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.378937 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.379020 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.379037 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.379065 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.379081 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:43Z","lastTransitionTime":"2025-11-21T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.483127 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.483180 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.483192 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.483212 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.483226 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:43Z","lastTransitionTime":"2025-11-21T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.586120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.586168 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.586180 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.586199 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.586213 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:43Z","lastTransitionTime":"2025-11-21T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.691001 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.691065 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.691087 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.691119 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.691142 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:43Z","lastTransitionTime":"2025-11-21T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.793293 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.793336 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.793346 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.793361 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.793371 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:43Z","lastTransitionTime":"2025-11-21T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.896258 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.896302 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.896315 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.896332 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.896343 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:43Z","lastTransitionTime":"2025-11-21T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.999631 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.999711 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.999730 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:43 crc kubenswrapper[4774]: I1121 14:04:43.999756 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:43.999775 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:43Z","lastTransitionTime":"2025-11-21T14:04:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.101739 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.101781 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.101790 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.101806 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.101828 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:44Z","lastTransitionTime":"2025-11-21T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.204703 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.204749 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.204759 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.204776 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.204788 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:44Z","lastTransitionTime":"2025-11-21T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.307740 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.307781 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.307789 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.307806 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.307837 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:44Z","lastTransitionTime":"2025-11-21T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.410710 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.410766 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.410777 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.410800 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.410812 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:44Z","lastTransitionTime":"2025-11-21T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.513605 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.513689 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.513700 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.513718 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.513728 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:44Z","lastTransitionTime":"2025-11-21T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.616528 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.616579 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.616594 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.616616 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.616659 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:44Z","lastTransitionTime":"2025-11-21T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.720312 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.720364 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.720373 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.720389 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.720400 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:44Z","lastTransitionTime":"2025-11-21T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.823094 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.823149 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.823159 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.823180 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.823193 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:44Z","lastTransitionTime":"2025-11-21T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.926315 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.926368 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.926379 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.926395 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:44 crc kubenswrapper[4774]: I1121 14:04:44.926406 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:44Z","lastTransitionTime":"2025-11-21T14:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.029532 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.029569 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.029579 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.029593 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.029603 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:45Z","lastTransitionTime":"2025-11-21T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.092627 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.092707 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:45 crc kubenswrapper[4774]: E1121 14:04:45.092767 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:45 crc kubenswrapper[4774]: E1121 14:04:45.092907 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.092973 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.093002 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:45 crc kubenswrapper[4774]: E1121 14:04:45.093032 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:45 crc kubenswrapper[4774]: E1121 14:04:45.093083 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.132333 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.132441 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.132458 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.132482 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:45 crc kubenswrapper[4774]: I1121 14:04:45.132499 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:45Z","lastTransitionTime":"2025-11-21T14:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 21 14:04:47 crc kubenswrapper[4774]: I1121 14:04:47.092282 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:04:47 crc kubenswrapper[4774]: I1121 14:04:47.092293 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:04:47 crc kubenswrapper[4774]: I1121 14:04:47.092365 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:04:47 crc kubenswrapper[4774]: E1121 14:04:47.092383 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:04:47 crc kubenswrapper[4774]: I1121 14:04:47.092495 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:04:47 crc kubenswrapper[4774]: E1121 14:04:47.092543 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:04:47 crc kubenswrapper[4774]: E1121 14:04:47.092498 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:04:47 crc kubenswrapper[4774]: E1121 14:04:47.092647 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[the same five-entry status block repeated from 14:04:47.195100 through 14:04:48.017980; 9 repetitions omitted]
Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.094531 4774 scope.go:117] "RemoveContainer" containerID="3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1"
[the same five-entry status block repeated from 14:04:48.121117 through 14:04:48.530042; 5 repetitions omitted]
Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.574490 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/2.log"
Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.577873 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1"}
Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.578457 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.591102 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.604605 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.620449 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.632713 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.632748 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:48 crc 
kubenswrapper[4774]: I1121 14:04:48.632760 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.632777 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.632788 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:48Z","lastTransitionTime":"2025-11-21T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.635558 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.653224 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.669357 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.681810 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.694454 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.705726 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:38Z\\\",\\\"message\\\":\\\"2025-11-21T14:03:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6\\\\n2025-11-21T14:03:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6 to /host/opt/cni/bin/\\\\n2025-11-21T14:03:53Z [verbose] multus-daemon started\\\\n2025-11-21T14:03:53Z [verbose] Readiness Indicator file check\\\\n2025-11-21T14:04:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.719494 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.732331 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"747d6ab8-8aae-4fac-bbc7-edb08b7a89e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54b142cdc4f369e06858bf91232a83c7333d45855700ce01defff35389225b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bf4070c52d0efcd83b293dd8102c3dc91563a0893f73ef9664ebc999f38a97e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e6a8f53660e5ab6ddadd0ae985773b591a8b466906a611e8e81aca7dfd8b50b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.734776 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.734800 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.734807 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.734846 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 
14:04:48.734857 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:48Z","lastTransitionTime":"2025-11-21T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.757587 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ov
nkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:22Z\\\",\\\"message\\\":\\\"pe:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1121 14:04:22.034523 6464 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1121 14:04:22.034383 6464 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-z77mh after 0 failed attempt(s)\\\\nI1121 14:04:22.034537 6464 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:22.034540 6464 lb_config.go:1031] Cluster endpoints for openshift-controller-manager-operator/metrics for network=default are: map[]\\\\nI1121 14:04:22.034552 6464 services_controller.go:443] Built service 
openshift-controller-manager-operator/metrics LB cluster-wide configs for network=defau\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"host
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.771597 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.782523 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.796852 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.810337 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.822722 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:48Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.837601 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.837632 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.837642 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.837656 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.837666 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:48Z","lastTransitionTime":"2025-11-21T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.940486 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.940526 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.940537 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.940552 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:48 crc kubenswrapper[4774]: I1121 14:04:48.940564 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:48Z","lastTransitionTime":"2025-11-21T14:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.042874 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.042921 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.042931 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.042949 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.042958 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:49Z","lastTransitionTime":"2025-11-21T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.092861 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.092973 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:49 crc kubenswrapper[4774]: E1121 14:04:49.093025 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.093096 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:49 crc kubenswrapper[4774]: E1121 14:04:49.093199 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.093107 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:49 crc kubenswrapper[4774]: E1121 14:04:49.093295 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:49 crc kubenswrapper[4774]: E1121 14:04:49.093459 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.146321 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.146373 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.146385 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.146407 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.146422 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:49Z","lastTransitionTime":"2025-11-21T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.248963 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.249011 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.249025 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.249044 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.249056 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:49Z","lastTransitionTime":"2025-11-21T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.352285 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.352328 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.352339 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.352358 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.352371 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:49Z","lastTransitionTime":"2025-11-21T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.454728 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.454785 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.454798 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.454827 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.454837 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:49Z","lastTransitionTime":"2025-11-21T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.557764 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.557801 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.557812 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.557837 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.557846 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:49Z","lastTransitionTime":"2025-11-21T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.583898 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/3.log" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.584722 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/2.log" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.587565 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1" exitCode=1 Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.587629 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1"} Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.587666 4774 scope.go:117] "RemoveContainer" containerID="3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.588861 4774 scope.go:117] "RemoveContainer" containerID="c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1" Nov 21 14:04:49 crc kubenswrapper[4774]: E1121 14:04:49.589308 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.620805 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:22Z\\\",\\\"message\\\":\\\"pe:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1121 14:04:22.034523 6464 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1121 14:04:22.034383 6464 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-z77mh after 0 failed attempt(s)\\\\nI1121 14:04:22.034537 6464 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:22.034540 6464 lb_config.go:1031] Cluster endpoints for openshift-controller-manager-operator/metrics for network=default are: map[]\\\\nI1121 14:04:22.034552 6464 services_controller.go:443] Built service openshift-controller-manager-operator/metrics LB cluster-wide configs for network=defau\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:48Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 14:04:48.867319 6825 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1121 14:04:48.867333 6825 handler.go:208] 
Removed *v1.Namespace event handler 5\\\\nI1121 14:04:48.867344 6825 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1121 14:04:48.867344 6825 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1121 14:04:48.867351 6825 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1121 14:04:48.867360 6825 handler.go:208] Removed *v1.Node event handler 2\\\\nI1121 14:04:48.867373 6825 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1121 14:04:48.867400 6825 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1121 14:04:48.867620 6825 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1121 14:04:48.867746 6825 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1121 14:04:48.867788 6825 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1121 14:04:48.867856 6825 factory.go:656] Stopping watch factory\\\\nI1121 14:04:48.867876 6825 ovnkube.go:599] Stopped ovnkube\\\\nI1121 14:04:48.867903 6825 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 14:04:48.867911 6825 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nF1121 14:04:48.867984 6825 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\
\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.637198 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.651864 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.660426 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.660450 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.660457 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.660469 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.660478 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:49Z","lastTransitionTime":"2025-11-21T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.666955 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.678564 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.690464 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.703893 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.718353 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.728652 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.742610 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.754167 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.763374 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.763415 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.763424 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.763440 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.763449 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:49Z","lastTransitionTime":"2025-11-21T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.768170 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.779288 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"747d6ab8-8aae-4fac-bbc7-edb08b7a89e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54b142cdc4f369e06858bf91232a83c7333d45855700ce01defff35389225b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bf4070c52d0efcd83b293dd8102c3dc91563a0893f73ef9664ebc999f38a97e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e6a8f53660e5ab6ddadd0ae985773b591a8b466906a611e8e81aca7dfd8b50b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.792979 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.804124 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.818205 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.832456 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:38Z\\\",\\\"message\\\":\\\"2025-11-21T14:03:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6\\\\n2025-11-21T14:03:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6 to /host/opt/cni/bin/\\\\n2025-11-21T14:03:53Z [verbose] multus-daemon started\\\\n2025-11-21T14:03:53Z [verbose] Readiness Indicator file check\\\\n2025-11-21T14:04:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:49Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.866079 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.866110 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.866119 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.866169 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.866182 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:49Z","lastTransitionTime":"2025-11-21T14:04:49Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.968059 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.968129 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.968147 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.968173 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:49 crc kubenswrapper[4774]: I1121 14:04:49.968192 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:49Z","lastTransitionTime":"2025-11-21T14:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.071361 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.071449 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.071470 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.071498 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.071519 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:50Z","lastTransitionTime":"2025-11-21T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.112421 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.128121 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.142530 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.154765 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.168370 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 
14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.173639 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.173686 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.173705 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.173729 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.173747 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:50Z","lastTransitionTime":"2025-11-21T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.185119 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 
14:04:50.202010 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.212713 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.228507 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.241220 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.253573 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.270368 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"747d6ab8-8aae-4fac-bbc7-edb08b7a89e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54b142cdc4f369e06858bf91232a83c7333d45855700ce01defff35389225b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bf4070c52d0efcd83b293dd8102c3dc91563a0893f73ef9664ebc999f38a97e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e6a8f53660e5ab6ddadd0ae985773b591a8b466906a611e8e81aca7dfd8b50b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.276451 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.276492 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.276508 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.276526 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 
14:04:50.276538 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:50Z","lastTransitionTime":"2025-11-21T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.285316 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.299371 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.314627 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.328380 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:38Z\\\",\\\"message\\\":\\\"2025-11-21T14:03:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6\\\\n2025-11-21T14:03:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6 to /host/opt/cni/bin/\\\\n2025-11-21T14:03:53Z [verbose] multus-daemon started\\\\n2025-11-21T14:03:53Z [verbose] Readiness Indicator file check\\\\n2025-11-21T14:04:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.358132 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a780a0ecf48cc98f932dc9999eb74ce538ed89cb1d9df704acb25775e1812c1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:22Z\\\",\\\"message\\\":\\\"pe:ClusterIP,ExternalIPs:[],SessionAffinity:None,LoadBalancerIP:,LoadBalancerSourceRanges:[],ExternalName:,ExternalTrafficPolicy:,HealthCheckNodePort:0,PublishNotReadyAddresses:false,SessionAffinityConfig:nil,IPFamilyPolicy:*SingleStack,ClusterIPs:[10.217.5.58],IPFamilies:[IPv4],AllocateLoadBalancerNodePorts:nil,LoadBalancerClass:nil,InternalTrafficPolicy:*Cluster,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1121 14:04:22.034523 6464 services_controller.go:454] Service openshift-operator-lifecycle-manager/catalog-operator-metrics for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nI1121 14:04:22.034383 6464 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-z77mh after 0 failed attempt(s)\\\\nI1121 14:04:22.034537 6464 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-z77mh\\\\nI1121 14:04:22.034540 6464 lb_config.go:1031] Cluster endpoints for openshift-controller-manager-operator/metrics for network=default are: map[]\\\\nI1121 14:04:22.034552 6464 services_controller.go:443] Built service openshift-controller-manager-operator/metrics LB cluster-wide configs for network=defau\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:21Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:48Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 14:04:48.867319 6825 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1121 14:04:48.867333 6825 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1121 14:04:48.867344 6825 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1121 14:04:48.867344 6825 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1121 14:04:48.867351 6825 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1121 14:04:48.867360 6825 handler.go:208] Removed *v1.Node event handler 2\\\\nI1121 14:04:48.867373 6825 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1121 14:04:48.867400 6825 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1121 14:04:48.867620 6825 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1121 14:04:48.867746 6825 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1121 14:04:48.867788 6825 handler.go:190] 
Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1121 14:04:48.867856 6825 factory.go:656] Stopping watch factory\\\\nI1121 14:04:48.867876 6825 ovnkube.go:599] Stopped ovnkube\\\\nI1121 14:04:48.867903 6825 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 14:04:48.867911 6825 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nF1121 14:04:48.867984 6825 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.379324 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.379385 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.379403 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.379428 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.379447 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:50Z","lastTransitionTime":"2025-11-21T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.482677 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.482723 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.482741 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.482760 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.482774 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:50Z","lastTransitionTime":"2025-11-21T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.585315 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.585362 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.585373 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.585391 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.585408 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:50Z","lastTransitionTime":"2025-11-21T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.592698 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/3.log" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.597253 4774 scope.go:117] "RemoveContainer" containerID="c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1" Nov 21 14:04:50 crc kubenswrapper[4774]: E1121 14:04:50.597488 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.611956 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"star
ted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.631898 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-c
ert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' 
detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.653968 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.669362 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.683401 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.688493 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.688628 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.688644 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.688666 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.688682 4774 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:50Z","lastTransitionTime":"2025-11-21T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.703248 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.716775 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.732899 4774 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.749759 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.761463 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.775576 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.788168 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:38Z\\\",\\\"message\\\":\\\"2025-11-21T14:03:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6\\\\n2025-11-21T14:03:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6 to /host/opt/cni/bin/\\\\n2025-11-21T14:03:53Z [verbose] multus-daemon started\\\\n2025-11-21T14:03:53Z [verbose] Readiness Indicator file check\\\\n2025-11-21T14:04:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.790515 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.790547 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.790561 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.790582 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.790598 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:50Z","lastTransitionTime":"2025-11-21T14:04:50Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.800481 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\
\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.814922 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"747d6ab8-8aae-4fac-bbc7-edb08b7a89e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54b142cdc4f369e06858bf91232a83c7333d45855700ce01defff35389225b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bf4070c52d0efcd83b293dd8102c3dc91563a0893f73ef9664ebc999f38a97e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e6a8f53660e5ab6ddadd0ae985773b591a8b466906a611e8e81aca7dfd8b50b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.828415 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.844272 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.874597 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153d6de871c17af48616dddd43599cfc53e65ab
a608b7acd89fc169abff25e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:48Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 14:04:48.867319 6825 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1121 14:04:48.867333 6825 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1121 14:04:48.867344 6825 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1121 14:04:48.867344 6825 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1121 14:04:48.867351 6825 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1121 14:04:48.867360 6825 handler.go:208] Removed *v1.Node event handler 2\\\\nI1121 14:04:48.867373 6825 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1121 14:04:48.867400 6825 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1121 14:04:48.867620 6825 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1121 14:04:48.867746 6825 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1121 14:04:48.867788 6825 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1121 14:04:48.867856 6825 factory.go:656] Stopping watch factory\\\\nI1121 14:04:48.867876 6825 ovnkube.go:599] Stopped ovnkube\\\\nI1121 14:04:48.867903 6825 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 14:04:48.867911 6825 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nF1121 14:04:48.867984 6825 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:50Z is after 2025-08-24T17:21:41Z" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.893308 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.893354 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.893365 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.893384 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.893396 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:50Z","lastTransitionTime":"2025-11-21T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.995588 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.995634 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.995646 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.995667 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:50 crc kubenswrapper[4774]: I1121 14:04:50.995679 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:50Z","lastTransitionTime":"2025-11-21T14:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.092730 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.092781 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.092878 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:51 crc kubenswrapper[4774]: E1121 14:04:51.092887 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.093014 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:51 crc kubenswrapper[4774]: E1121 14:04:51.093175 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:51 crc kubenswrapper[4774]: E1121 14:04:51.093510 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:51 crc kubenswrapper[4774]: E1121 14:04:51.094006 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.098844 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.098873 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.098882 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.098894 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.098904 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:51Z","lastTransitionTime":"2025-11-21T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.201800 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.201870 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.201884 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.201905 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.201916 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:51Z","lastTransitionTime":"2025-11-21T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.306295 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.306349 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.306361 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.306384 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.306398 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:51Z","lastTransitionTime":"2025-11-21T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.409246 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.409312 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.409323 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.409336 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.409346 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:51Z","lastTransitionTime":"2025-11-21T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.511526 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.511661 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.511681 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.511706 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.511754 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:51Z","lastTransitionTime":"2025-11-21T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.613878 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.613933 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.613945 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.613963 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.613977 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:51Z","lastTransitionTime":"2025-11-21T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.716673 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.717063 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.717074 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.717095 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.717107 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:51Z","lastTransitionTime":"2025-11-21T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.820277 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.820313 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.820321 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.820339 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.820350 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:51Z","lastTransitionTime":"2025-11-21T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.922224 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.922283 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.922291 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.922306 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:51 crc kubenswrapper[4774]: I1121 14:04:51.922352 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:51Z","lastTransitionTime":"2025-11-21T14:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.024701 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.024755 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.024775 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.024799 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.024843 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:52Z","lastTransitionTime":"2025-11-21T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.128066 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.128142 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.128163 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.128189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.128205 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:52Z","lastTransitionTime":"2025-11-21T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.230987 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.231063 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.231105 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.231137 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.231158 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:52Z","lastTransitionTime":"2025-11-21T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.334428 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.334478 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.334493 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.334514 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.334532 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:52Z","lastTransitionTime":"2025-11-21T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.443326 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.443359 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.443376 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.443396 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.443406 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:52Z","lastTransitionTime":"2025-11-21T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.546144 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.546198 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.546221 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.546244 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.546257 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:52Z","lastTransitionTime":"2025-11-21T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.648959 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.649021 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.649036 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.649057 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.649068 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:52Z","lastTransitionTime":"2025-11-21T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.751933 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.751986 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.751999 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.752017 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.752033 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:52Z","lastTransitionTime":"2025-11-21T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.854936 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.855002 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.855019 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.855041 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.855056 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:52Z","lastTransitionTime":"2025-11-21T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.958705 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.958788 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.958805 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.958866 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:52 crc kubenswrapper[4774]: I1121 14:04:52.958883 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:52Z","lastTransitionTime":"2025-11-21T14:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.063157 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.063239 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.063263 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.063295 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.063319 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.092919 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.092967 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:53 crc kubenswrapper[4774]: E1121 14:04:53.093195 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.093238 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:53 crc kubenswrapper[4774]: E1121 14:04:53.093350 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.093413 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:53 crc kubenswrapper[4774]: E1121 14:04:53.093776 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:53 crc kubenswrapper[4774]: E1121 14:04:53.093931 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.113768 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.127326 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.127482 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.127512 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.127543 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.127565 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: E1121 14:04:53.143969 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:53Z is after 
2025-08-24T17:21:41Z" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.148300 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.148368 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.148389 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.148410 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.148427 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: E1121 14:04:53.165930 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:53Z is after 
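Every retry in this burst fails identically: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 serves a certificate that expired 2025-08-24T17:21:41Z, while the node clock reads 2025-11-21T14:04:53Z. A minimal Go sketch for confirming the certificate's validity window directly (the address comes from the Post URL in the log; InsecureSkipVerify is deliberate so the handshake completes even though verification would fail, which is the whole point of inspecting the dates):

```go
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Address taken from the webhook URL in the log records above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		// Skip verification on purpose: verification would reject the
		// expired certificate before we could read its validity window.
		InsecureSkipVerify: true,
	})
	if err != nil {
		log.Fatalf("handshake failed: %v", err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
	fmt.Printf("expired:   %v\n", time.Now().After(cert.NotAfter))
}
```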
2025-08-24T17:21:41Z" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.170182 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.170223 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.170233 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.170252 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.170263 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: E1121 14:04:53.182156 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:53Z is after 
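Each record also carries a second, independent failure: the Ready condition stays False because the kubelet finds no CNI configuration file in /etc/kubernetes/cni/net.d/. A small Go sketch of the readiness test that NetworkPluginNotReady message implies, assuming the path from the log and the .conf/.conflist/.json extensions that CNI configuration loaders conventionally accept (an illustration of the check, not the kubelet's actual code):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Path taken from the NetworkPluginNotReady message in the log.
	confDir := "/etc/kubernetes/cni/net.d"

	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", confDir, err)
		return
	}

	// Collect files with the extensions CNI config loaders look for.
	var confs []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			confs = append(confs, e.Name())
		}
	}
	if len(confs) == 0 {
		fmt.Println("no CNI configuration files found; network plugin not ready")
		return
	}
	fmt.Printf("found CNI configs: %v\n", confs)
}
```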
2025-08-24T17:21:41Z" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.185895 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.185933 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.185944 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.185958 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.185968 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: E1121 14:04:53.201101 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:53Z is after 
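The attempt below is the last of the burst: immediately after it the kubelet logs "Unable to update node status" err="update node status exceeds retry count" and gives up until the next sync interval. The bound is a fixed retry constant in the kubelet (nodeStatusUpdateRetry = 5, matching the five attempts visible here); the schematic Go sketch below shows the pattern, with tryUpdateNodeStatus as a simplified stand-in for the kubelet's patch logic, not its real signature:

```go
package main

import (
	"errors"
	"fmt"
)

// nodeStatusUpdateRetry mirrors the kubelet's fixed bound on
// node-status patch attempts per sync.
const nodeStatusUpdateRetry = 5

// tryUpdateNodeStatus stands in for the kubelet's patch attempt; here
// it always fails the way the webhook call in the log does.
func tryUpdateNodeStatus() error {
	return errors.New(`failed calling webhook "node.network-node-identity.openshift.io": certificate has expired`)
}

func main() {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdateNodeStatus(); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return // success: stop retrying
	}
	fmt.Println("Unable to update node status: update node status exceeds retry count")
}
```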
2025-08-24T17:21:41Z" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.204528 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.204589 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.204598 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.204612 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.204623 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: E1121 14:04:53.220345 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:04:53Z is after 
2025-08-24T17:21:41Z" Nov 21 14:04:53 crc kubenswrapper[4774]: E1121 14:04:53.220467 4774 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.222189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.222223 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.222232 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.222246 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.222255 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.325595 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.325642 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.325652 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.325669 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.325678 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.429629 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.429692 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.429705 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.429728 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.429742 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.531909 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.532001 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.532016 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.532038 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.532051 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.635115 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.635197 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.635210 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.635228 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.635247 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.738430 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.738495 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.738512 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.738540 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.738562 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.841921 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.841980 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.841998 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.842023 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.842041 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.944990 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.945056 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.945075 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.945110 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:53 crc kubenswrapper[4774]: I1121 14:04:53.945129 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:53Z","lastTransitionTime":"2025-11-21T14:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.047867 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.047905 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.047913 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.047928 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.047937 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:54Z","lastTransitionTime":"2025-11-21T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.150882 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.151056 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.151070 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.151084 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.151093 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:54Z","lastTransitionTime":"2025-11-21T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.253432 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.253468 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.253477 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.253499 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.253514 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:54Z","lastTransitionTime":"2025-11-21T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.356562 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.356608 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.356628 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.356658 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.356678 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:54Z","lastTransitionTime":"2025-11-21T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.459711 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.459761 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.459773 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.459790 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.459804 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:54Z","lastTransitionTime":"2025-11-21T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.561809 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.561886 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.561895 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.561907 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.561921 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:54Z","lastTransitionTime":"2025-11-21T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.664510 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.664550 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.664561 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.664578 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.664591 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:54Z","lastTransitionTime":"2025-11-21T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.767434 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.767512 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.767542 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.767572 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.767593 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:54Z","lastTransitionTime":"2025-11-21T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.870377 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.870419 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.870431 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.870448 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.870459 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:54Z","lastTransitionTime":"2025-11-21T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.892863 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.892963 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.892944654 +0000 UTC m=+149.545143913 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.893002 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.893032 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.893066 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.893088 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.893107 4774 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.893151 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-11-21 14:05:58.89314055 +0000 UTC m=+149.545339809 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.893215 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.893232 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.893225 4774 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.893324 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.893297134 +0000 UTC m=+149.545496423 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.893330 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.893372 4774 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.893244 4774 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.893396 4774 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.893566 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.893479679 +0000 UTC m=+149.545678978 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:04:54 crc kubenswrapper[4774]: E1121 14:04:54.893666 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.893595612 +0000 UTC m=+149.545794911 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.972357 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.972412 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.972429 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.972449 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:54 crc kubenswrapper[4774]: I1121 14:04:54.972464 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:54Z","lastTransitionTime":"2025-11-21T14:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.076285 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.076346 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.076357 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.076379 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.076392 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:55Z","lastTransitionTime":"2025-11-21T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.092690 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.092755 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.092766 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.092685 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:55 crc kubenswrapper[4774]: E1121 14:04:55.092972 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:55 crc kubenswrapper[4774]: E1121 14:04:55.093143 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:55 crc kubenswrapper[4774]: E1121 14:04:55.093245 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:55 crc kubenswrapper[4774]: E1121 14:04:55.093362 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.179253 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.179294 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.179306 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.179324 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.179335 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:55Z","lastTransitionTime":"2025-11-21T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.282813 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.282874 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.282890 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.282970 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.282986 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:55Z","lastTransitionTime":"2025-11-21T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.386979 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.387047 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.387084 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.387151 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.387165 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:55Z","lastTransitionTime":"2025-11-21T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.490314 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.490400 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.490434 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.490470 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.490494 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:55Z","lastTransitionTime":"2025-11-21T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.593785 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.593858 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.593870 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.593891 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.593907 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:55Z","lastTransitionTime":"2025-11-21T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.698180 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.698259 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.698277 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.698317 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.698334 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:55Z","lastTransitionTime":"2025-11-21T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.801336 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.801414 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.801455 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.801483 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.801500 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:55Z","lastTransitionTime":"2025-11-21T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.904785 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.904931 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.904952 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.904981 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:55 crc kubenswrapper[4774]: I1121 14:04:55.905000 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:55Z","lastTransitionTime":"2025-11-21T14:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.007788 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.007888 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.007907 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.007933 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.007953 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:56Z","lastTransitionTime":"2025-11-21T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.110763 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.110882 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.110909 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.110941 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.110964 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:56Z","lastTransitionTime":"2025-11-21T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.214060 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.214107 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.214118 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.214132 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.214141 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:56Z","lastTransitionTime":"2025-11-21T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.317254 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.317348 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.317371 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.317402 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.317426 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:56Z","lastTransitionTime":"2025-11-21T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.420985 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.421037 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.421049 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.421071 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.421089 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:56Z","lastTransitionTime":"2025-11-21T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.523674 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.523710 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.523718 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.523735 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.523744 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:56Z","lastTransitionTime":"2025-11-21T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.628670 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.628733 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.628746 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.628766 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.628785 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:56Z","lastTransitionTime":"2025-11-21T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.732571 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.732637 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.732660 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.732696 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.732717 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:56Z","lastTransitionTime":"2025-11-21T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.835118 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.835173 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.835182 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.835196 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.835205 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:56Z","lastTransitionTime":"2025-11-21T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.937836 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.937891 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.937905 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.937923 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:56 crc kubenswrapper[4774]: I1121 14:04:56.937941 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:56Z","lastTransitionTime":"2025-11-21T14:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.040090 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.040186 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.040209 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.040239 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.040262 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:57Z","lastTransitionTime":"2025-11-21T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.092733 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.092804 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.092792 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.092940 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:57 crc kubenswrapper[4774]: E1121 14:04:57.093072 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:57 crc kubenswrapper[4774]: E1121 14:04:57.093254 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:57 crc kubenswrapper[4774]: E1121 14:04:57.093294 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:57 crc kubenswrapper[4774]: E1121 14:04:57.093354 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.142750 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.142808 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.142844 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.142867 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.142882 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:57Z","lastTransitionTime":"2025-11-21T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.246383 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.246458 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.246475 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.246500 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.246517 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:57Z","lastTransitionTime":"2025-11-21T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.349329 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.349412 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.349435 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.349461 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.349482 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:57Z","lastTransitionTime":"2025-11-21T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.452713 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.452777 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.452799 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.452869 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.452894 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:57Z","lastTransitionTime":"2025-11-21T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.556335 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.556423 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.556439 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.556457 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.556470 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:57Z","lastTransitionTime":"2025-11-21T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.659000 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.659074 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.659102 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.659132 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.659154 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:57Z","lastTransitionTime":"2025-11-21T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.762291 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.762349 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.762405 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.762438 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.762456 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:57Z","lastTransitionTime":"2025-11-21T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.866169 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.866220 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.866237 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.866266 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.866287 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:57Z","lastTransitionTime":"2025-11-21T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.968999 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.969052 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.969063 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.969080 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:57 crc kubenswrapper[4774]: I1121 14:04:57.969092 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:57Z","lastTransitionTime":"2025-11-21T14:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.072553 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.072606 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.072620 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.072638 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.072652 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:58Z","lastTransitionTime":"2025-11-21T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.176082 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.176128 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.176138 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.176159 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.176170 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:58Z","lastTransitionTime":"2025-11-21T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.278920 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.279012 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.279026 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.279043 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.279054 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:58Z","lastTransitionTime":"2025-11-21T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.382198 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.382258 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.382277 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.382307 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.382332 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:58Z","lastTransitionTime":"2025-11-21T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.485259 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.485326 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.485343 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.485367 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.485384 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:58Z","lastTransitionTime":"2025-11-21T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.587689 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.587730 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.587737 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.587751 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.587760 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:58Z","lastTransitionTime":"2025-11-21T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.690328 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.690395 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.690413 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.690441 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.690459 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:58Z","lastTransitionTime":"2025-11-21T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.793504 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.793560 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.793578 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.793606 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.793624 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:58Z","lastTransitionTime":"2025-11-21T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.895894 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.895951 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.895960 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.895974 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.895999 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:58Z","lastTransitionTime":"2025-11-21T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.998939 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.999022 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.999045 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.999070 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:58 crc kubenswrapper[4774]: I1121 14:04:58.999088 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:58Z","lastTransitionTime":"2025-11-21T14:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.092424 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.092469 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.092517 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.092426 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:04:59 crc kubenswrapper[4774]: E1121 14:04:59.092656 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:04:59 crc kubenswrapper[4774]: E1121 14:04:59.092758 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:04:59 crc kubenswrapper[4774]: E1121 14:04:59.092957 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:04:59 crc kubenswrapper[4774]: E1121 14:04:59.093144 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.102347 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.102381 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.102390 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.102407 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.102420 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:59Z","lastTransitionTime":"2025-11-21T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.205639 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.205731 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.205753 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.205774 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.205786 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:59Z","lastTransitionTime":"2025-11-21T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.308294 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.308348 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.308362 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.308383 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.308398 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:59Z","lastTransitionTime":"2025-11-21T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.416301 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.416341 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.416353 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.416371 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.416383 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:59Z","lastTransitionTime":"2025-11-21T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.518786 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.518841 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.518851 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.518867 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.518877 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:59Z","lastTransitionTime":"2025-11-21T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.622659 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.622703 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.622721 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.622746 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.622764 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:59Z","lastTransitionTime":"2025-11-21T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.727189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.727583 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.727732 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.727893 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.728030 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:59Z","lastTransitionTime":"2025-11-21T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.831897 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.831990 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.832013 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.832040 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.832061 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:59Z","lastTransitionTime":"2025-11-21T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.934980 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.935038 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.935054 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.935077 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:04:59 crc kubenswrapper[4774]: I1121 14:04:59.935094 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:04:59Z","lastTransitionTime":"2025-11-21T14:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.038072 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.038489 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.038684 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.038915 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.039120 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:00Z","lastTransitionTime":"2025-11-21T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.114698 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:38Z\\\",\\\"message\\\":\\\"2025-11-21T14:03:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6\\\\n2025-11-21T14:03:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6 to /host/opt/cni/bin/\\\\n2025-11-21T14:03:53Z [verbose] multus-daemon started\\\\n2025-11-21T14:03:53Z [verbose] Readiness Indicator file check\\\\n2025-11-21T14:04:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.134611 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
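The kube-multus termination message embedded above shows where the restart came from: the container waits for the default network's readiness indicator file and exits when the wait times out. Below is a sketch of that wait under stated assumptions: the indicator path is taken from the log, the 1 s interval and 45 s timeout are guesses read off the log's 14:03:53 to 14:04:38 window, and wait.PollImmediate is the apimachinery helper the "pollimmediate error: timed out waiting for the condition" wording points at; this is illustrative, not multus's exact implementation.

package main

import (
	"fmt"
	"os"
	"time"

	"k8s.io/apimachinery/pkg/util/wait"
)

func main() {
	// Readiness indicator file named in the log.
	indicator := "/host/run/multus/cni/net.d/10-ovn-kubernetes.conf"

	// Poll immediately, then every second, until the default network's
	// config file appears or the (assumed) 45 s timeout expires.
	err := wait.PollImmediate(1*time.Second, 45*time.Second, func() (bool, error) {
		_, statErr := os.Stat(indicator)
		if statErr == nil {
			return true, nil // file exists: default network is ready
		}
		if os.IsNotExist(statErr) {
			return false, nil // not there yet, keep polling
		}
		return false, statErr // unexpected stat error aborts the wait
	})
	if err != nil {
		// On timeout wait.PollImmediate returns the error whose text,
		// "timed out waiting for the condition", appears in the log.
		fmt.Printf("have you checked that your default network is ready? pollimmediate error: %v\n", err)
		os.Exit(1)
	}
	fmt.Println("default network is ready")
}

Because OVN-Kubernetes never writes 10-ovn-kubernetes.conf here, the wait expires, the container exits 1, and kubelet restarts it, which is the restartCount bump recorded in the patch above.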
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.142648 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.142734 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.142760 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.142792 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.142815 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:00Z","lastTransitionTime":"2025-11-21T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
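Every status patch in this stretch dies on the same TLS failure: the webhook's serving certificate expired on 2025-08-24, long before the node's clock reached 2025-11-21, so Go's verifier rejects the handshake. A small sketch of the validity-window check that produces the "certificate has expired or is not yet valid" wording; the certificate file name is hypothetical, so point it at the webhook's actual serving certificate.

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("serving-cert.pem") // hypothetical path
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	now := time.Now().UTC()
	switch {
	case now.After(cert.NotAfter):
		// The case in the log: 2025-11-21T14:05:00Z is after the
		// certificate's NotAfter of 2025-08-24T17:21:41Z.
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	default:
		fmt.Printf("certificate valid until %s\n", cert.NotAfter.Format(time.RFC3339))
	}
}

This also explains why the failures are uniform across otherwise healthy pods: the expiry is a property of the webhook endpoint, not of any individual pod's status.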
Has your network provider started?"} Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.152707 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"747d6ab8-8aae-4fac-bbc7-edb08b7a89e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54b142cdc4f369e06858bf91232a83c7333d45855700ce01defff35389225b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bf4070c52d0efcd83b293dd8102c3dc91563a0893f73ef9664ebc999f38a97e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e6a8f53660e5ab6ddadd0ae985773b591a8b466906a611e8e81aca7dfd8b50b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.174113 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
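The escaped payloads in these entries are strategic merge patches: the status manager diffs the previously observed PodStatus against the new one, and list directives such as $setElementOrder/conditions come from that machinery, which merges the conditions list by its "type" key instead of replacing it wholesale. A sketch of producing such a patch with apimachinery; the pod contents are invented for illustration, not taken from any pod in this log.

package main

import (
	"encoding/json"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/util/strategicpatch"
)

func main() {
	// Old status: pod not ready.
	oldPod := corev1.Pod{Status: corev1.PodStatus{
		Conditions: []corev1.PodCondition{
			{Type: corev1.PodReady, Status: corev1.ConditionFalse},
		},
	}}
	// New status: same condition flipped to True.
	newPod := oldPod
	newPod.Status.Conditions = []corev1.PodCondition{
		{Type: corev1.PodReady, Status: corev1.ConditionTrue,
			LastTransitionTime: metav1.Now()},
	}

	oldJSON, err := json.Marshal(oldPod)
	if err != nil {
		panic(err)
	}
	newJSON, err := json.Marshal(newPod)
	if err != nil {
		panic(err)
	}

	// The Pod type's struct tags tell the differ that conditions merge
	// by "type", yielding a compact patch like the ones quoted above.
	patch, err := strategicpatch.CreateTwoWayMergePatch(oldJSON, newJSON, corev1.Pod{})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(patch))
}

In this log the patches are computed fine; it is only the subsequent PATCH request that is rejected, because the admission webhook in front of the API server cannot be reached over TLS.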
Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.174113 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z"
Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.194334 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z"
Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.211135 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:48Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 14:04:48.867319 6825 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1121 14:04:48.867333 6825 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1121 14:04:48.867344 6825 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1121 14:04:48.867344 6825 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1121 14:04:48.867351 6825 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1121 14:04:48.867360 6825 handler.go:208] Removed *v1.Node event handler 2\\\\nI1121 14:04:48.867373 6825 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1121 14:04:48.867400 6825 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1121 14:04:48.867620 6825 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1121 14:04:48.867746 6825 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1121 14:04:48.867788 6825 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1121 14:04:48.867856 6825 factory.go:656] Stopping watch factory\\\\nI1121 14:04:48.867876 6825 ovnkube.go:599] Stopped ovnkube\\\\nI1121 14:04:48.867903 6825 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 14:04:48.867911 6825 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nF1121 14:04:48.867984 6825 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.245137 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.245215 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.245233 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.245254 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.245269 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:00Z","lastTransitionTime":"2025-11-21T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.261202 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.274721 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.286590 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.296201 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.306559 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 
14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.318657 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.341237 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2af5e22-0e01-4c13-b013-bed135aa26b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1013e632d33c3fc793f0d300057ba4e6f5d9a0e64918ee4d30ccef681e30ed31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://745749aa8f2d100f5230d5832b51dc3cefe56c1574fc4e9471a6a26fe92d20b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caa45d7f18c6249861d19436f2485af14fcdb827733dbc7dbdb98237380ce122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0771517e6b110b86a46fac953bb40e60415f85d
35ad1b3105dd7a6c9168382ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2670cfdaa01722ec3dc500fc37686a0e697f697fcec4bc8fc4e9353b3696ff0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9b3679ff721b506bdc69a59e5e6966b9782d9a318cd229cf058ac720bceb65a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9b3679ff721b506bdc69a59e5e6966b9782d9a318cd229cf058ac720bceb65a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e2e38d7efe1e4ada3b421dbb47abeb4eb3417e096b2e8914675163d69c3df08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e2e38d7efe1e4ada3b421dbb47abeb4eb3417e096b2e8914675163d69c3df08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://41d08de6615afa8b7559b50734c114546f51cccb2255bad507a58ded880eb424\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41d08de6615afa8b7559b50734c114546f51cccb2255bad507a58ded880eb424\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.348195 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.348228 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.348238 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.348255 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.348267 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:00Z","lastTransitionTime":"2025-11-21T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.357725 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.372116 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.386396 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.403283 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:00Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.451458 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.451501 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:00 crc 
kubenswrapper[4774]: I1121 14:05:00.451512 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.451568 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:00 crc kubenswrapper[4774]: I1121 14:05:00.451583 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:00Z","lastTransitionTime":"2025-11-21T14:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.068837 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.068884 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.068900 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.068917 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.068928 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:01Z","lastTransitionTime":"2025-11-21T14:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.092888 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.092955 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:01 crc kubenswrapper[4774]: E1121 14:05:01.093042 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:01 crc kubenswrapper[4774]: E1121 14:05:01.093180 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.093219 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:01 crc kubenswrapper[4774]: E1121 14:05:01.093317 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.093864 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:01 crc kubenswrapper[4774]: E1121 14:05:01.093993 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.171496 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.171640 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.171666 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.171702 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.171727 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:01Z","lastTransitionTime":"2025-11-21T14:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.273628 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.273668 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.273678 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.273694 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:01 crc kubenswrapper[4774]: I1121 14:05:01.273705 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:01Z","lastTransitionTime":"2025-11-21T14:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:02 crc kubenswrapper[4774]: I1121 14:05:02.109162 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Nov 21 14:05:02 crc kubenswrapper[4774]: I1121 14:05:02.200231 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:02 crc kubenswrapper[4774]: I1121 14:05:02.200262 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:02 crc kubenswrapper[4774]: I1121 14:05:02.200271 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:02 crc kubenswrapper[4774]: I1121 14:05:02.200285 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:02 crc kubenswrapper[4774]: I1121 14:05:02.200296 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:02Z","lastTransitionTime":"2025-11-21T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:02 crc kubenswrapper[4774]: I1121 14:05:02.302509 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:02 crc kubenswrapper[4774]: I1121 14:05:02.302591 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:02 crc kubenswrapper[4774]: I1121 14:05:02.302603 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:02 crc kubenswrapper[4774]: I1121 14:05:02.302621 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:02 crc kubenswrapper[4774]: I1121 14:05:02.302631 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:02Z","lastTransitionTime":"2025-11-21T14:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.031978 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.032058 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.032081 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.032113 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.032133 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.092189 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.092303 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.092379 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:03 crc kubenswrapper[4774]: E1121 14:05:03.092375 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:05:03 crc kubenswrapper[4774]: E1121 14:05:03.092588 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:03 crc kubenswrapper[4774]: E1121 14:05:03.092677 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.093152 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:03 crc kubenswrapper[4774]: E1121 14:05:03.093290 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.136231 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.136315 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.136348 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.136376 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.136399 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.239104 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.239163 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.239181 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.239209 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.239252 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.246189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.246219 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.246231 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.246245 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.246255 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: E1121 14:05:03.262474 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.267034 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.267072 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.267088 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.267107 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.267122 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: E1121 14:05:03.287339 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.291776 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.291803 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.291829 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.291845 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.291855 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: E1121 14:05:03.311521 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.316702 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.316738 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.316751 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.316766 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.316776 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: E1121 14:05:03.335617 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.340553 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.340588 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.340600 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.340615 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.340627 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: E1121 14:05:03.355611 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:03Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:03 crc kubenswrapper[4774]: E1121 14:05:03.355722 4774 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.357177 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.357222 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.357238 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.357262 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.357277 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.460780 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.461092 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.461166 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.461243 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.461318 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.564740 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.565111 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.565253 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.565440 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.565586 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.668402 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.668467 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.668497 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.668532 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.668558 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.771558 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.771666 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.771676 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.771692 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.771701 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.874851 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.874911 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.874927 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.874951 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.874970 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.977672 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.977723 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.977735 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.977756 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:03 crc kubenswrapper[4774]: I1121 14:05:03.977766 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:03Z","lastTransitionTime":"2025-11-21T14:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.080427 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.080474 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.080492 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.080511 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.080526 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:04Z","lastTransitionTime":"2025-11-21T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.092917 4774 scope.go:117] "RemoveContainer" containerID="c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1" Nov 21 14:05:04 crc kubenswrapper[4774]: E1121 14:05:04.093068 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.184614 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.184718 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.184756 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.184779 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.184792 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:04Z","lastTransitionTime":"2025-11-21T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.288061 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.288169 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.288190 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.288550 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.288908 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:04Z","lastTransitionTime":"2025-11-21T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.392376 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.392479 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.392532 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.392556 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.392573 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:04Z","lastTransitionTime":"2025-11-21T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.495846 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.495909 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.495933 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.495964 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.495987 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:04Z","lastTransitionTime":"2025-11-21T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.599347 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.599431 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.599456 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.599492 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.599515 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:04Z","lastTransitionTime":"2025-11-21T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.702403 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.702477 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.702539 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.702564 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.702579 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:04Z","lastTransitionTime":"2025-11-21T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.805658 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.805698 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.805711 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.805728 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.805739 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:04Z","lastTransitionTime":"2025-11-21T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.909121 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.909490 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.909514 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.909546 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:04 crc kubenswrapper[4774]: I1121 14:05:04.909566 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:04Z","lastTransitionTime":"2025-11-21T14:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.012585 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.012649 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.012667 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.012694 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.012711 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:05Z","lastTransitionTime":"2025-11-21T14:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.092024 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.092080 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.092128 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:05 crc kubenswrapper[4774]: E1121 14:05:05.092139 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.092099 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:05 crc kubenswrapper[4774]: E1121 14:05:05.092209 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:05 crc kubenswrapper[4774]: E1121 14:05:05.092269 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:05:05 crc kubenswrapper[4774]: E1121 14:05:05.092302 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.116175 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.116222 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.116240 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.116259 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.116271 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:05Z","lastTransitionTime":"2025-11-21T14:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.221543 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.221584 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.221595 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.221611 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.221623 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:05Z","lastTransitionTime":"2025-11-21T14:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.323566 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.323618 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.323631 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.323651 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.323665 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:05Z","lastTransitionTime":"2025-11-21T14:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.426539 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.426596 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.426614 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.426640 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.426692 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:05Z","lastTransitionTime":"2025-11-21T14:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.530775 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.530895 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.530920 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.530954 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.530981 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:05Z","lastTransitionTime":"2025-11-21T14:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.633370 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.633411 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.633422 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.633438 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.633449 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:05Z","lastTransitionTime":"2025-11-21T14:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.735618 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.735678 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.735701 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.735726 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.735742 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:05Z","lastTransitionTime":"2025-11-21T14:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.838354 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.838452 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.838468 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.838485 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.838498 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:05Z","lastTransitionTime":"2025-11-21T14:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.940948 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.941015 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.941032 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.941056 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:05 crc kubenswrapper[4774]: I1121 14:05:05.941072 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:05Z","lastTransitionTime":"2025-11-21T14:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.044134 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.044213 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.044232 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.044263 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.044280 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:06Z","lastTransitionTime":"2025-11-21T14:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.148344 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.148407 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.148423 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.148448 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.148466 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:06Z","lastTransitionTime":"2025-11-21T14:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.251637 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.251721 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.251756 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.251788 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.251811 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:06Z","lastTransitionTime":"2025-11-21T14:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.354524 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.354607 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.354635 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.354668 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.354689 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:06Z","lastTransitionTime":"2025-11-21T14:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.457958 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.458040 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.458065 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.458096 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.458122 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:06Z","lastTransitionTime":"2025-11-21T14:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.561999 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.562219 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.562261 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.562295 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.562319 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:06Z","lastTransitionTime":"2025-11-21T14:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.665074 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.665120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.665132 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.665147 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.665158 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:06Z","lastTransitionTime":"2025-11-21T14:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.767686 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.767764 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.767787 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.767854 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.767873 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:06Z","lastTransitionTime":"2025-11-21T14:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.870651 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.870706 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.870727 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.870750 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.870766 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:06Z","lastTransitionTime":"2025-11-21T14:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.974928 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.975005 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.975040 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.975072 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:06 crc kubenswrapper[4774]: I1121 14:05:06.975095 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:06Z","lastTransitionTime":"2025-11-21T14:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.078883 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.078942 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.078961 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.079004 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.079024 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:07Z","lastTransitionTime":"2025-11-21T14:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.092453 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.092544 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:07 crc kubenswrapper[4774]: E1121 14:05:07.092615 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.092730 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.093000 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:07 crc kubenswrapper[4774]: E1121 14:05:07.092994 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:07 crc kubenswrapper[4774]: E1121 14:05:07.093081 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:05:07 crc kubenswrapper[4774]: E1121 14:05:07.093155 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.182918 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.183074 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.183097 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.183120 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.183138 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:07Z","lastTransitionTime":"2025-11-21T14:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.286495 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.286556 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.286579 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.286608 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.286627 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:07Z","lastTransitionTime":"2025-11-21T14:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.390124 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.390206 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.390223 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.390250 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.390266 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:07Z","lastTransitionTime":"2025-11-21T14:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.493259 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.493323 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.493338 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.493363 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.493381 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:07Z","lastTransitionTime":"2025-11-21T14:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.595714 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.595776 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.595789 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.595843 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.595866 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:07Z","lastTransitionTime":"2025-11-21T14:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.698915 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.698985 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.699008 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.699041 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.699064 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:07Z","lastTransitionTime":"2025-11-21T14:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.801989 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.802053 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.802070 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.802096 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.802134 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:07Z","lastTransitionTime":"2025-11-21T14:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.905735 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.905814 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.905892 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.905942 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:07 crc kubenswrapper[4774]: I1121 14:05:07.905972 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:07Z","lastTransitionTime":"2025-11-21T14:05:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.009084 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.009153 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.009177 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.009206 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.009229 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:08Z","lastTransitionTime":"2025-11-21T14:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.111608 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.111654 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.111666 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.111682 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.111691 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:08Z","lastTransitionTime":"2025-11-21T14:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.214353 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.214399 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.214408 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.214423 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.214434 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:08Z","lastTransitionTime":"2025-11-21T14:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.318168 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.318247 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.318270 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.318305 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.318328 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:08Z","lastTransitionTime":"2025-11-21T14:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.421336 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.421409 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.421435 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.421469 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.421490 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:08Z","lastTransitionTime":"2025-11-21T14:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.524740 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.524897 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.524935 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.524967 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.524990 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:08Z","lastTransitionTime":"2025-11-21T14:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.627574 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.627640 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.627692 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.627724 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.627750 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:08Z","lastTransitionTime":"2025-11-21T14:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.730253 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.730298 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.730310 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.730328 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.730343 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:08Z","lastTransitionTime":"2025-11-21T14:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.832332 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.832368 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.832378 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.832392 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.832401 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:08Z","lastTransitionTime":"2025-11-21T14:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.934886 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.934944 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.934962 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.934988 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:08 crc kubenswrapper[4774]: I1121 14:05:08.935007 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:08Z","lastTransitionTime":"2025-11-21T14:05:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.038654 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.038703 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.038718 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.038739 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.038753 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:09Z","lastTransitionTime":"2025-11-21T14:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.048629 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:09 crc kubenswrapper[4774]: E1121 14:05:09.048852 4774 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:05:09 crc kubenswrapper[4774]: E1121 14:05:09.048942 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs podName:0d294e10-6a0e-4871-871c-01fb8e7ead03 nodeName:}" failed. No retries permitted until 2025-11-21 14:06:13.048917312 +0000 UTC m=+163.701116581 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs") pod "network-metrics-daemon-44mbn" (UID: "0d294e10-6a0e-4871-871c-01fb8e7ead03") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.092975 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.093024 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.093033 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:09 crc kubenswrapper[4774]: E1121 14:05:09.093104 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:05:09 crc kubenswrapper[4774]: E1121 14:05:09.093229 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.093270 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:09 crc kubenswrapper[4774]: E1121 14:05:09.093387 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:09 crc kubenswrapper[4774]: E1121 14:05:09.093465 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.141204 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.141280 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.141293 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.141310 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.141321 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:09Z","lastTransitionTime":"2025-11-21T14:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.244943 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.244988 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.245000 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.245017 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.245030 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:09Z","lastTransitionTime":"2025-11-21T14:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.348051 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.348140 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.348163 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.348199 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.348216 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:09Z","lastTransitionTime":"2025-11-21T14:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.451516 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.451595 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.451619 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.451652 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.451674 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:09Z","lastTransitionTime":"2025-11-21T14:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.553711 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.553781 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.553801 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.553860 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.553883 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:09Z","lastTransitionTime":"2025-11-21T14:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.660040 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.660105 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.660126 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.660163 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.660181 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:09Z","lastTransitionTime":"2025-11-21T14:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.762923 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.762982 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.762999 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.763022 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.763040 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:09Z","lastTransitionTime":"2025-11-21T14:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.866685 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.866741 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.866759 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.866786 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.866809 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:09Z","lastTransitionTime":"2025-11-21T14:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.969802 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.969903 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.969926 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.969955 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:09 crc kubenswrapper[4774]: I1121 14:05:09.969977 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:09Z","lastTransitionTime":"2025-11-21T14:05:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.073609 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.073669 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.073686 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.073711 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.073729 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:10Z","lastTransitionTime":"2025-11-21T14:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.126145 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d2af5e22-0e01-4c13-b013-bed135aa26b4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1013e632d33c3fc793f0d300057ba4e6f5d9a0e64918ee4d30ccef681e30ed31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://745749aa8f2d100f5230d5832b51dc3cefe56c1574fc4e9471a6a26fe92d20b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://caa45d7f18c6249861d19436f2485af14fcdb827733dbc7dbdb98237380ce122\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0771517e6b110b86a46fac953bb40e60415f85d35ad1b3105dd7a6c9168382ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d2670cfdaa01722ec3dc500fc37686a0e697f697fcec4bc8fc4e9353b3696ff0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9b3679ff721b506bdc69a59e5e6966b9782d9a318cd229cf058ac720bceb65a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b9b3679ff721b506bdc69a59e5e6966b9782d9a318cd229cf058ac720bceb65a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e2e38d7efe1e4ada3b421dbb47abeb4eb3417e096b2e8914675163d69c3df08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e2e38d7efe1e4ada3b421dbb47abeb4eb3417e096b2e8914675163d69c3df08\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://41d08de6615afa8b7559b50734c114546f51cccb2255bad507a58ded880eb424\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41d08de6615afa8b7559b50734c114546f51cccb2255bad507a58ded880eb424\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.149907 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cce2a637ac3758cde68a0fd19b2b55508d9c9b50877e15e4ee9e95090d7a291\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.172206 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://903512d3d1dbd9cea15f4e84a3b00cce1675c22d9fb56a69fc980362a5b85ab6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80c3705211a93d8587008fe13a575922f78684ae01e288bfad23af6d7f65e88b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.177984 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.178086 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.178111 4774 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.178180 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.178202 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:10Z","lastTransitionTime":"2025-11-21T14:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.191555 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-gn24l" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a7381e8b-9fee-4279-84e7-e3e51eecf0f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://16e35fe2a4c93925e36f1ca9353506c0769a5e5e207eca0d63e570e0a4596f00\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b9zv9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-gn24l\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.218931 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-q452c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"48241a35-9491-44a3-aeef-5bd2424490a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://003cae8f96c4ab950c050bd318c1ddd47db60271d36ce3c0214946a2035fcc76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bcd8c8368d487c89bb70e892302628f64dc8075a149dfb50ab726a0b96b37c07\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:68
7fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f13d1fa0c6c522a8043eb9edcb16bda942eb650ce2e8124a36a4269b5bee5fe2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f10f2ff7cc80b817215d472ca1c4447cbe91a8e3de4b72f47da99c94bfcbd1bb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5455f09d73eaf8c32c2e508d37966511afecc06c2861c68de03c4655cf82f661\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18c1cd6b01107c8965d6b10d7c1e3c90f49be5aecc5657a3f4ae3bc1c031548d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61f12dc60141685fd84a7b033a035220efb0ac8dc28a54862c3df3903ac76f79\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rwdmg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-q452c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.235300 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-44mbn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0d294e10-6a0e-4871-871c-01fb8e7ead03\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:05Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkbdr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:05Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-44mbn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.255474 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"058f88be-1899-49bb-bf29-3bf3b7cb2390\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42f99d46d1f1753df0f8a54d93f002ca73a8e8858d19ecf7a9e54c160da706c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://68326dd63948c190b137a32c3eedf14687217bfda2ae34663e2210e36ee674f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5002b2d265cc981c6c3d987b1a4f83a433fa4c48bb0c49213767b0971e486998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://676b6ed4590f5f0fba2538cc723a3db0574e3b57dc4e8a1409b63a7870f1f45b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.275326 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"747d6ab8-8aae-4fac-bbc7-edb08b7a89e4\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54b142cdc4f369e06858bf91232a83c7333d45855700ce01defff35389225b22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1bf4070c52d0efcd83b293dd8102c3dc91563a0893f73ef9664ebc999f38a97e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e6a8f53660e5ab6ddadd0ae985773b591a8b466906a611e8e81aca7dfd8b50b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e957aaab7b2b7b059aacbde408325af358cfe6a3f12d072f11149a2fba378573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.281505 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.281557 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.281570 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.281590 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 
14:05:10.281603 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:10Z","lastTransitionTime":"2025-11-21T14:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.295402 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.310577 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.324608 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:55Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d63515cfcfc34c8ff53fb5ade17852b053f820476bf5200aa23a4373506f9cd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.340767 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hdxzw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0bf8b868-6e71-4073-a9ad-e2ac8ae15215\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:38Z\\\",\\\"message\\\":\\\"2025-11-21T14:03:53+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6\\\\n2025-11-21T14:03:53+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4a0601a2-b044-4365-a35e-1f6e4490f3f6 to /host/opt/cni/bin/\\\\n2025-11-21T14:03:53Z [verbose] multus-daemon started\\\\n2025-11-21T14:03:53Z [verbose] Readiness Indicator file check\\\\n2025-11-21T14:04:38Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kkbqf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hdxzw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.351985 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"95780a02-f82d-4540-9852-b567d2e5150c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://53ebc6440be16bb65dec8507dcec929d05acc550d545ab97897cc72e8b78728e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7de89a56ea1de3ce8a96253dbcd31024728759e58e5a8c2bf0c845497c6f47dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7de89a56ea1de3ce8a96253dbcd31024728759e58e5a8c2bf0c845497c6f47dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.374467 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4057b5ee-926e-4931-b5a0-2c204d18ce72\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-21T14:04:48Z\\\",\\\"message\\\":\\\"handler.go:208] Removed *v1.Node event handler 7\\\\nI1121 14:04:48.867319 6825 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1121 14:04:48.867333 6825 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1121 14:04:48.867344 6825 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1121 14:04:48.867344 6825 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1121 14:04:48.867351 6825 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1121 14:04:48.867360 6825 handler.go:208] Removed *v1.Node event handler 2\\\\nI1121 14:04:48.867373 6825 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1121 14:04:48.867400 6825 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1121 14:04:48.867620 6825 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI1121 14:04:48.867746 6825 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI1121 14:04:48.867788 6825 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1121 14:04:48.867856 6825 factory.go:656] Stopping watch factory\\\\nI1121 14:04:48.867876 6825 ovnkube.go:599] Stopped ovnkube\\\\nI1121 14:04:48.867903 6825 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1121 14:04:48.867911 6825 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nF1121 14:04:48.867984 6825 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:04:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-fkrjk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-rltf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.384194 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.384254 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.384267 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.384283 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.384296 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:10Z","lastTransitionTime":"2025-11-21T14:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.395264 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"47726c85-db2c-4561-bf4a-cf8c5ad605b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c2770c2bc8316c5f23230e284ce2e216b7619dedd48df0b9ee2da9b11d569d2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://22be91e2be7ba4e099b3014c081cd9a10fbe1bffd4244976b857d5362bd0bfb0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4916fdd5564b14990cf6c27ff288e3e7bcfc177cb0f6aa5fd0a2a0525cb94cd\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57a5b7bf2cbf3ba32cf55d74acccc042b24710f870fb2c386778045847cc9e29\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e13492399a457e7bef647b9392673dc3689fdfba2ddb54a755cb4221598d8b03\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"le observer\\\\nW1121 14:03:50.903532 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1121 14:03:50.903666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1121 14:03:50.904430 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2164218015/tls.crt::/tmp/serving-cert-2164218015/tls.key\\\\\\\"\\\\nI1121 14:03:51.367155 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1121 14:03:51.370541 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1121 14:03:51.370571 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1121 14:03:51.370597 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1121 14:03:51.370602 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1121 14:03:51.379572 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1121 14:03:51.379608 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379615 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1121 14:03:51.379620 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1121 14:03:51.379623 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1121 14:03:51.379627 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1121 14:03:51.379631 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1121 14:03:51.379837 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1121 14:03:51.382684 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:45Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f5d1359f72d3e448a241cc91ca81473634cb9bb1d071c129ab79f345f2f077b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:33Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c6870bb27aacb58b6542e6ea71db5dd9dbab8aab8661b7867f1369047251070\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-21T14:03:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-21T14:03:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.411634 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.425869 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://918a72cd536ffac9aaf9952993d7ebd1c396cbaecb128dd9ca0a59a935ed3bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s29s9\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:51Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-jtxgb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.438990 4774 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-z77mh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2548a4d5-f3ff-42d9-9183-76ca04816cab\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:03:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b77ac5022fd706b0de01a9be699376747337080ac31640e4d3b710568ceab44\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:03:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vkmfl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:03:57Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-z77mh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.451810 4774 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"12e7b89b-cb7e-4e3e-b9ef-bf63ed997463\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-21T14:04:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://65addbdb533243699dc494e736dbf0d32543b76b9c8d925cd87cb42abe4faf32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c3d01d6003d3b99b250eb4b352295da09d2ed3084cc0be076aad902fad4f407a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-21T14:04:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7m2zp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-21T14:04:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jkjvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:10Z is after 2025-08-24T17:21:41Z" Nov 21 
14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.487908 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.487964 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.487981 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.488005 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.488022 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:10Z","lastTransitionTime":"2025-11-21T14:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.591670 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.591736 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.591764 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.591796 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.591858 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:10Z","lastTransitionTime":"2025-11-21T14:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.694600 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.694637 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.694647 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.694663 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.694674 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:10Z","lastTransitionTime":"2025-11-21T14:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.797812 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.797890 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.797914 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.797942 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.797963 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:10Z","lastTransitionTime":"2025-11-21T14:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.901722 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.901785 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.901802 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.901852 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:10 crc kubenswrapper[4774]: I1121 14:05:10.901871 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:10Z","lastTransitionTime":"2025-11-21T14:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.005220 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.005302 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.005334 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.005364 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.005384 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:11Z","lastTransitionTime":"2025-11-21T14:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.092928 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.093008 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.092972 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.092969 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:11 crc kubenswrapper[4774]: E1121 14:05:11.093151 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:05:11 crc kubenswrapper[4774]: E1121 14:05:11.093312 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:11 crc kubenswrapper[4774]: E1121 14:05:11.093475 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:11 crc kubenswrapper[4774]: E1121 14:05:11.093667 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.108286 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.108350 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.108372 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.108401 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.108425 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:11Z","lastTransitionTime":"2025-11-21T14:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.211995 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.212064 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.212088 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.212116 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.212139 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:11Z","lastTransitionTime":"2025-11-21T14:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.315554 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.315659 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.315715 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.315739 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.315756 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:11Z","lastTransitionTime":"2025-11-21T14:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.419129 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.419206 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.419229 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.419259 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.419283 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:11Z","lastTransitionTime":"2025-11-21T14:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.522916 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.522998 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.523018 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.523036 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.523052 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:11Z","lastTransitionTime":"2025-11-21T14:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.626221 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.626281 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.626297 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.626321 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.626337 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:11Z","lastTransitionTime":"2025-11-21T14:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.729311 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.729369 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.729389 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.729412 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.729429 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:11Z","lastTransitionTime":"2025-11-21T14:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.832722 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.832807 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.832853 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.832877 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.832894 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:11Z","lastTransitionTime":"2025-11-21T14:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.936296 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.936347 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.936360 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.936377 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:11 crc kubenswrapper[4774]: I1121 14:05:11.936386 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:11Z","lastTransitionTime":"2025-11-21T14:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.038949 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.039023 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.039037 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.039055 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.039068 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:12Z","lastTransitionTime":"2025-11-21T14:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.141836 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.141883 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.141894 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.141910 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.141920 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:12Z","lastTransitionTime":"2025-11-21T14:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.244620 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.244654 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.244663 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.244677 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.244685 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:12Z","lastTransitionTime":"2025-11-21T14:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.347862 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.347920 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.347933 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.347960 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.347973 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:12Z","lastTransitionTime":"2025-11-21T14:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.450342 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.450391 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.450401 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.450416 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.450425 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:12Z","lastTransitionTime":"2025-11-21T14:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.552884 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.552932 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.552943 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.552962 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.552974 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:12Z","lastTransitionTime":"2025-11-21T14:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.655239 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.655378 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.655459 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.655491 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.655580 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:12Z","lastTransitionTime":"2025-11-21T14:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.759734 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.759805 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.759859 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.759889 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.759910 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:12Z","lastTransitionTime":"2025-11-21T14:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.863014 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.863056 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.863068 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.863084 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.863099 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:12Z","lastTransitionTime":"2025-11-21T14:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.966191 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.966233 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.966246 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.966261 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:12 crc kubenswrapper[4774]: I1121 14:05:12.966274 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:12Z","lastTransitionTime":"2025-11-21T14:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.068399 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.068436 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.068445 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.068459 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.068469 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:13Z","lastTransitionTime":"2025-11-21T14:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.092696 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.092762 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.092776 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:13 crc kubenswrapper[4774]: E1121 14:05:13.092989 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.093055 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:13 crc kubenswrapper[4774]: E1121 14:05:13.093086 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:13 crc kubenswrapper[4774]: E1121 14:05:13.093141 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:13 crc kubenswrapper[4774]: E1121 14:05:13.093309 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.171146 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.171190 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.171198 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.171218 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.171228 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:13Z","lastTransitionTime":"2025-11-21T14:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.275435 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.275490 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.275527 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.275557 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.275580 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:13Z","lastTransitionTime":"2025-11-21T14:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.378934 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.379061 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.379085 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.379113 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.379135 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:13Z","lastTransitionTime":"2025-11-21T14:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.449061 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.449129 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.449151 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.449177 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.449196 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:13Z","lastTransitionTime":"2025-11-21T14:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:13 crc kubenswrapper[4774]: E1121 14:05:13.463695 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:13Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.469485 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.469551 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.469570 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.469593 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.469611 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:13Z","lastTransitionTime":"2025-11-21T14:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:13 crc kubenswrapper[4774]: E1121 14:05:13.487189 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:13Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.491140 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.491175 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
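Every retry above fails for the same reason: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) is months before the node's current time (2025-11-21T14:05:13Z). A minimal diagnostic sketch in Go, not part of the kubelet, that prints the validity window of whatever certificate the endpoint actually serves (InsecureSkipVerify is set only so the expired chain can still be read):

package main

import (
    "crypto/tls"
    "fmt"
    "log"
    "time"
)

func main() {
    // Dial the webhook endpoint named in the log; skip verification so an
    // expired certificate can still be inspected rather than rejected.
    conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
    if err != nil {
        log.Fatalf("dial: %v", err)
    }
    defer conn.Close()

    now := time.Now().UTC()
    for _, cert := range conn.ConnectionState().PeerCertificates {
        fmt.Printf("subject=%q notBefore=%s notAfter=%s expired=%t\n",
            cert.Subject.CommonName,
            cert.NotBefore.Format(time.RFC3339),
            cert.NotAfter.Format(time.RFC3339),
            now.After(cert.NotAfter))
    }
}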
event="NodeHasNoDiskPressure" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.491189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.491207 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.491215 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:13Z","lastTransitionTime":"2025-11-21T14:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:13 crc kubenswrapper[4774]: E1121 14:05:13.506285 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:13Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.511130 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.511188 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
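The body the kubelet keeps trying to send is a JSON strategic merge patch against the Node object: the $setElementOrder/conditions directive pins the order of the merge-keyed conditions list, and each conditions entry merges by its type key. A small illustrative sketch, not kubelet code, that builds the same shape (abridged to the Ready condition, field values copied from the log) with nothing but encoding/json:

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    // Shape of the kubelet's node-status strategic merge patch as seen in
    // the failed attempts above.
    patch := map[string]any{
        "status": map[string]any{
            "$setElementOrder/conditions": []map[string]string{
                {"type": "MemoryPressure"},
                {"type": "DiskPressure"},
                {"type": "PIDPressure"},
                {"type": "Ready"},
            },
            "conditions": []map[string]string{{
                "type":               "Ready",
                "status":             "False",
                "reason":             "KubeletNotReady",
                "lastHeartbeatTime":  "2025-11-21T14:05:13Z",
                "lastTransitionTime": "2025-11-21T14:05:13Z",
            }},
        },
    }
    out, err := json.Marshal(patch)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(out))
}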
event="NodeHasNoDiskPressure" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.511199 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.511217 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.511232 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:13Z","lastTransitionTime":"2025-11-21T14:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:13 crc kubenswrapper[4774]: E1121 14:05:13.524021 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:13Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.528556 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.528628 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.528641 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.528662 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.528673 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:13Z","lastTransitionTime":"2025-11-21T14:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:13 crc kubenswrapper[4774]: E1121 14:05:13.543681 4774 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-21T14:05:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1f65383-a207-4db6-9ed6-aa2f40413778\\\",\\\"systemUUID\\\":\\\"076e9bc6-aa9d-40aa-a5d0-6a135a29bfa3\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-21T14:05:13Z is after 2025-08-24T17:21:41Z" Nov 21 14:05:13 crc kubenswrapper[4774]: E1121 14:05:13.543832 4774 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.545829 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
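Independently of the webhook failure, the node stays NotReady because the runtime finds no CNI network configuration in /etc/kubernetes/cni/net.d/. A quick check one could run on the node, a sketch rather than kubelet code (the .conf/.conflist/.json extensions are the ones libcni conventionally loads):

package main

import (
    "fmt"
    "os"
    "path/filepath"
)

func main() {
    dir := "/etc/kubernetes/cni/net.d" // directory named in the log message
    entries, err := os.ReadDir(dir)
    if err != nil {
        fmt.Printf("cannot read %s: %v\n", dir, err)
        return
    }
    found := 0
    for _, e := range entries {
        switch filepath.Ext(e.Name()) {
        case ".conf", ".conflist", ".json": // extensions libcni looks for
            fmt.Println("CNI config:", filepath.Join(dir, e.Name()))
            found++
        }
    }
    if found == 0 {
        fmt.Println("no CNI configuration files found; the network plugin has not written its config yet")
    }
}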
event="NodeHasSufficientMemory" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.545868 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.545882 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.545902 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.545924 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:13Z","lastTransitionTime":"2025-11-21T14:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.648810 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.648935 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.649223 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.649305 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.649329 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:13Z","lastTransitionTime":"2025-11-21T14:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.751810 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.751998 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.752018 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.752042 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:13 crc kubenswrapper[4774]: I1121 14:05:13.752061 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:13Z","lastTransitionTime":"2025-11-21T14:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 21 14:05:15 crc kubenswrapper[4774]: I1121 14:05:15.092168 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:05:15 crc kubenswrapper[4774]: I1121 14:05:15.092235 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:05:15 crc kubenswrapper[4774]: I1121 14:05:15.092251 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:05:15 crc kubenswrapper[4774]: I1121 14:05:15.092272 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:05:15 crc kubenswrapper[4774]: E1121 14:05:15.092337 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:05:15 crc kubenswrapper[4774]: I1121 14:05:15.092408 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:15 crc kubenswrapper[4774]: I1121 14:05:15.092478 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:15 crc kubenswrapper[4774]: I1121 14:05:15.092497 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:15 crc kubenswrapper[4774]: I1121 14:05:15.092519 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:15 crc kubenswrapper[4774]: I1121 14:05:15.092535 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:15Z","lastTransitionTime":"2025-11-21T14:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:15 crc kubenswrapper[4774]: E1121 14:05:15.092638 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:05:15 crc kubenswrapper[4774]: E1121 14:05:15.092768 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:05:15 crc kubenswrapper[4774]: E1121 14:05:15.092913 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:05:15 crc kubenswrapper[4774]: I1121 14:05:15.092960 4774 scope.go:117] "RemoveContainer" containerID="c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1"
Nov 21 14:05:15 crc kubenswrapper[4774]: E1121 14:05:15.093090 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72"
Nov 21 14:05:17 crc kubenswrapper[4774]: I1121 14:05:17.092467 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:05:17 crc kubenswrapper[4774]: I1121 14:05:17.092510 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:05:17 crc kubenswrapper[4774]: I1121 14:05:17.092561 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:05:17 crc kubenswrapper[4774]: I1121 14:05:17.092653 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:05:17 crc kubenswrapper[4774]: E1121 14:05:17.092877 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:05:17 crc kubenswrapper[4774]: E1121 14:05:17.092959 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:05:17 crc kubenswrapper[4774]: E1121 14:05:17.093012 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:05:17 crc kubenswrapper[4774]: E1121 14:05:17.093157 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Has your network provider started?"} Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.493077 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.493137 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.493149 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.493164 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.493174 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:18Z","lastTransitionTime":"2025-11-21T14:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.595454 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.595499 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.595507 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.595522 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.595531 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:18Z","lastTransitionTime":"2025-11-21T14:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.697516 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.697562 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.697574 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.697589 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.697601 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:18Z","lastTransitionTime":"2025-11-21T14:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.799719 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.799756 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.799766 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.799779 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.799790 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:18Z","lastTransitionTime":"2025-11-21T14:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.903001 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.903098 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.903122 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.903151 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:18 crc kubenswrapper[4774]: I1121 14:05:18.903174 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:18Z","lastTransitionTime":"2025-11-21T14:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:19 crc kubenswrapper[4774]: I1121 14:05:19.006155 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:19 crc kubenswrapper[4774]: I1121 14:05:19.006214 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:19 crc kubenswrapper[4774]: I1121 14:05:19.006226 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:19 crc kubenswrapper[4774]: I1121 14:05:19.006244 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:19 crc kubenswrapper[4774]: I1121 14:05:19.006256 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:19Z","lastTransitionTime":"2025-11-21T14:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 21 14:05:19 crc kubenswrapper[4774]: I1121 14:05:19.092595 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:05:19 crc kubenswrapper[4774]: I1121 14:05:19.092596 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:05:19 crc kubenswrapper[4774]: I1121 14:05:19.092656 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:05:19 crc kubenswrapper[4774]: I1121 14:05:19.092656 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:05:19 crc kubenswrapper[4774]: E1121 14:05:19.093510 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:05:19 crc kubenswrapper[4774]: E1121 14:05:19.093682 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:05:19 crc kubenswrapper[4774]: E1121 14:05:19.093865 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:05:19 crc kubenswrapper[4774]: E1121 14:05:19.093996 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:05:20 crc kubenswrapper[4774]: I1121 14:05:20.159132 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=88.159097591 podStartE2EDuration="1m28.159097591s" podCreationTimestamp="2025-11-21 14:03:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:20.120843354 +0000 UTC m=+110.773042613" watchObservedRunningTime="2025-11-21 14:05:20.159097591 +0000 UTC m=+110.811296860"
Nov 21 14:05:20 crc kubenswrapper[4774]: I1121 14:05:20.193200 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podStartSLOduration=90.193183833 podStartE2EDuration="1m30.193183833s" podCreationTimestamp="2025-11-21 14:03:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:20.193020119 +0000 UTC m=+110.845219378" watchObservedRunningTime="2025-11-21 14:05:20.193183833 +0000 UTC m=+110.845383092"
Nov 21 14:05:20 crc kubenswrapper[4774]: I1121 14:05:20.214213 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-z77mh" podStartSLOduration=89.214195648 podStartE2EDuration="1m29.214195648s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:20.212611604 +0000 UTC m=+110.864810883" watchObservedRunningTime="2025-11-21 14:05:20.214195648 +0000 UTC m=+110.866394907"
Nov 21 14:05:20 crc kubenswrapper[4774]: I1121 14:05:20.227132 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jkjvp" podStartSLOduration=89.227113121 podStartE2EDuration="1m29.227113121s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:20.22707622 +0000 UTC m=+110.879275479" watchObservedRunningTime="2025-11-21 14:05:20.227113121 +0000 UTC m=+110.879312370"
Nov 21 14:05:20 crc kubenswrapper[4774]: I1121 14:05:20.256301 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=27.256279609 podStartE2EDuration="27.256279609s" podCreationTimestamp="2025-11-21 14:04:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:20.255805616 +0000 UTC m=+110.908004865" watchObservedRunningTime="2025-11-21 14:05:20.256279609 +0000 UTC m=+110.908478868"
Nov 21 14:05:20 crc kubenswrapper[4774]: I1121 14:05:20.301876 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-gn24l" podStartSLOduration=90.301853535 podStartE2EDuration="1m30.301853535s" podCreationTimestamp="2025-11-21 14:03:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:20.301747033 +0000 UTC m=+110.953946292" watchObservedRunningTime="2025-11-21 14:05:20.301853535 +0000 UTC m=+110.954052794"
Nov 21 14:05:20 crc kubenswrapper[4774]: I1121 14:05:20.321395 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-q452c" podStartSLOduration=90.321353699 podStartE2EDuration="1m30.321353699s" podCreationTimestamp="2025-11-21 14:03:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:20.321327618 +0000 UTC m=+110.973526887" watchObservedRunningTime="2025-11-21 14:05:20.321353699 +0000 UTC m=+110.973552968"
Nov 21 14:05:20 crc kubenswrapper[4774]: I1121 14:05:20.348354 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=88.348335607 podStartE2EDuration="1m28.348335607s" podCreationTimestamp="2025-11-21 14:03:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:20.347125794 +0000 UTC m=+110.999325073" watchObservedRunningTime="2025-11-21 14:05:20.348335607 +0000 UTC m=+111.000534866"
Nov 21 14:05:20 crc kubenswrapper[4774]: I1121 14:05:20.361550 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=56.361528448 podStartE2EDuration="56.361528448s" podCreationTimestamp="2025-11-21 14:04:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:20.361173338 +0000 UTC m=+111.013372617" watchObservedRunningTime="2025-11-21 14:05:20.361528448 +0000 UTC m=+111.013727707"
Nov 21 14:05:20 crc kubenswrapper[4774]: I1121 14:05:20.426698 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-hdxzw" podStartSLOduration=90.4266796 podStartE2EDuration="1m30.4266796s" podCreationTimestamp="2025-11-21 14:03:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:20.414187948 +0000 UTC m=+111.066387207" watchObservedRunningTime="2025-11-21 14:05:20.4266796 +0000 UTC m=+111.078878869"
Nov 21 14:05:20 crc kubenswrapper[4774]: I1121 14:05:20.452541 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=18.452522796 podStartE2EDuration="18.452522796s" podCreationTimestamp="2025-11-21 14:05:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:20.42816222 +0000 UTC m=+111.080361499" watchObservedRunningTime="2025-11-21 14:05:20.452522796 +0000 UTC m=+111.104722045"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.092758 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.092795 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.092804 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.093034 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:05:21 crc kubenswrapper[4774]: E1121 14:05:21.093107 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:05:21 crc kubenswrapper[4774]: E1121 14:05:21.093213 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:05:21 crc kubenswrapper[4774]: E1121 14:05:21.093373 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:05:21 crc kubenswrapper[4774]: E1121 14:05:21.093435 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Has your network provider started?"} Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.383357 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.383384 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.383391 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.383404 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.383412 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:21Z","lastTransitionTime":"2025-11-21T14:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.485121 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.485151 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.485162 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.485178 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.485188 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:21Z","lastTransitionTime":"2025-11-21T14:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.587023 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.587067 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.587083 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.587104 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.587121 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:21Z","lastTransitionTime":"2025-11-21T14:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.689646 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.689691 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.689701 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.689714 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.689723 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:21Z","lastTransitionTime":"2025-11-21T14:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.792915 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.792966 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.792981 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.793001 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.793014 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:21Z","lastTransitionTime":"2025-11-21T14:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.895053 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.895093 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.895108 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.895126 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.895140 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:21Z","lastTransitionTime":"2025-11-21T14:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.997877 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.997922 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.997948 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.997968 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:21 crc kubenswrapper[4774]: I1121 14:05:21.997982 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:21Z","lastTransitionTime":"2025-11-21T14:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.101446 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.101539 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.101568 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.101598 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.101619 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:22Z","lastTransitionTime":"2025-11-21T14:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.204641 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.204712 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.204737 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.204766 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.204789 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:22Z","lastTransitionTime":"2025-11-21T14:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.308256 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.308311 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.308328 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.308352 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.308368 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:22Z","lastTransitionTime":"2025-11-21T14:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.411293 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.411362 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.411379 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.411432 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.411454 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:22Z","lastTransitionTime":"2025-11-21T14:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.513574 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.513623 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.513645 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.513664 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.513676 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:22Z","lastTransitionTime":"2025-11-21T14:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.616127 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.616154 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.616161 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.616176 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.616184 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:22Z","lastTransitionTime":"2025-11-21T14:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.718221 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.718288 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.718310 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.718341 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.718362 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:22Z","lastTransitionTime":"2025-11-21T14:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.820628 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.820692 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.820712 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.820738 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.820754 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:22Z","lastTransitionTime":"2025-11-21T14:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.923644 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.924165 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.924507 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.924660 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:22 crc kubenswrapper[4774]: I1121 14:05:22.924809 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:22Z","lastTransitionTime":"2025-11-21T14:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.027883 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.028235 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.028360 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.028485 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.028627 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:23Z","lastTransitionTime":"2025-11-21T14:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.093090 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:05:23 crc kubenswrapper[4774]: E1121 14:05:23.093592 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.093153 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:05:23 crc kubenswrapper[4774]: E1121 14:05:23.094156 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.093123 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:05:23 crc kubenswrapper[4774]: E1121 14:05:23.094577 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.093213 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:05:23 crc kubenswrapper[4774]: E1121 14:05:23.095146 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.131220 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.131286 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.131306 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.131331 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.131349 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:23Z","lastTransitionTime":"2025-11-21T14:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.235726 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.235768 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.235781 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.235796 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.235806 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:23Z","lastTransitionTime":"2025-11-21T14:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.339215 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.339333 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.339351 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.339377 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.339394 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:23Z","lastTransitionTime":"2025-11-21T14:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.445143 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.445210 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.445229 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.445261 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.445283 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:23Z","lastTransitionTime":"2025-11-21T14:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.548284 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.548324 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.548335 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.548350 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.548361 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:23Z","lastTransitionTime":"2025-11-21T14:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.652722 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.653511 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.653611 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.653714 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.653852 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:23Z","lastTransitionTime":"2025-11-21T14:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.756702 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.756749 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.756759 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.756774 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.756783 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:23Z","lastTransitionTime":"2025-11-21T14:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.776143 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.776189 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.776387 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.776403 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.776413 4774 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-21T14:05:23Z","lastTransitionTime":"2025-11-21T14:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.830955 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"]
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.831347 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.833259 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.835721 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.835849 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.836077 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.913049 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.913132 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.913189 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.913234 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:23 crc kubenswrapper[4774]: I1121 14:05:23.913283 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.014596 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.014652 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.014685 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.014730 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.014767 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.014880 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.014886 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.016595 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-service-ca\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.026658 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.045140 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30bd297d-8bff-4a3e-95d8-c5b5b245e15d-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-9vnfv\" (UID: \"30bd297d-8bff-4a3e-95d8-c5b5b245e15d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.145103 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv"
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.717991 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv" event={"ID":"30bd297d-8bff-4a3e-95d8-c5b5b245e15d","Type":"ContainerStarted","Data":"8983c1026560f453d3b1f24e81bb7c48b6f613948211c8812678cf49e889f79c"}
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.719791 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv" event={"ID":"30bd297d-8bff-4a3e-95d8-c5b5b245e15d","Type":"ContainerStarted","Data":"622d08b8939317d957575334223f3631323e71f21880a17e377408057c094e1c"}
Nov 21 14:05:24 crc kubenswrapper[4774]: I1121 14:05:24.737442 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-9vnfv" podStartSLOduration=94.737418158 podStartE2EDuration="1m34.737418158s" podCreationTimestamp="2025-11-21 14:03:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:24.736590075 +0000 UTC m=+115.388789334" watchObservedRunningTime="2025-11-21 14:05:24.737418158 +0000 UTC m=+115.389617447"
Nov 21 14:05:25 crc kubenswrapper[4774]: I1121 14:05:25.092083 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:05:25 crc kubenswrapper[4774]: I1121 14:05:25.092163 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:05:25 crc kubenswrapper[4774]: I1121 14:05:25.092188 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:05:25 crc kubenswrapper[4774]: I1121 14:05:25.092268 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:05:25 crc kubenswrapper[4774]: E1121 14:05:25.092272 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:05:25 crc kubenswrapper[4774]: E1121 14:05:25.092400 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:05:25 crc kubenswrapper[4774]: E1121 14:05:25.092513 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:05:25 crc kubenswrapper[4774]: E1121 14:05:25.092609 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:05:25 crc kubenswrapper[4774]: I1121 14:05:25.723335 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hdxzw_0bf8b868-6e71-4073-a9ad-e2ac8ae15215/kube-multus/1.log"
Nov 21 14:05:25 crc kubenswrapper[4774]: I1121 14:05:25.724197 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hdxzw_0bf8b868-6e71-4073-a9ad-e2ac8ae15215/kube-multus/0.log"
Nov 21 14:05:25 crc kubenswrapper[4774]: I1121 14:05:25.724270 4774 generic.go:334] "Generic (PLEG): container finished" podID="0bf8b868-6e71-4073-a9ad-e2ac8ae15215" containerID="2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8" exitCode=1
Nov 21 14:05:25 crc kubenswrapper[4774]: I1121 14:05:25.724358 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hdxzw" event={"ID":"0bf8b868-6e71-4073-a9ad-e2ac8ae15215","Type":"ContainerDied","Data":"2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8"}
Nov 21 14:05:25 crc kubenswrapper[4774]: I1121 14:05:25.724463 4774 scope.go:117] "RemoveContainer" containerID="258a9762eee9fc5b937c0226351828c3e485f83b7862e521aef62fdc97353f76"
Nov 21 14:05:25 crc kubenswrapper[4774]: I1121 14:05:25.724890 4774 scope.go:117] "RemoveContainer" containerID="2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8"
Nov 21 14:05:25 crc kubenswrapper[4774]: E1121 14:05:25.725044 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-hdxzw_openshift-multus(0bf8b868-6e71-4073-a9ad-e2ac8ae15215)\"" pod="openshift-multus/multus-hdxzw" podUID="0bf8b868-6e71-4073-a9ad-e2ac8ae15215"
Nov 21 14:05:26 crc kubenswrapper[4774]: I1121 14:05:26.729326 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hdxzw_0bf8b868-6e71-4073-a9ad-e2ac8ae15215/kube-multus/1.log"
Nov 21 14:05:27 crc kubenswrapper[4774]: I1121 14:05:27.093073 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:05:27 crc kubenswrapper[4774]: E1121 14:05:27.093195 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:05:27 crc kubenswrapper[4774]: I1121 14:05:27.093558 4774 scope.go:117] "RemoveContainer" containerID="c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1"
Nov 21 14:05:27 crc kubenswrapper[4774]: I1121 14:05:27.093683 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:05:27 crc kubenswrapper[4774]: I1121 14:05:27.093783 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:05:27 crc kubenswrapper[4774]: I1121 14:05:27.093894 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:05:27 crc kubenswrapper[4774]: E1121 14:05:27.093748 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-rltf4_openshift-ovn-kubernetes(4057b5ee-926e-4931-b5a0-2c204d18ce72)\"" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72"
Nov 21 14:05:27 crc kubenswrapper[4774]: E1121 14:05:27.094158 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:05:27 crc kubenswrapper[4774]: E1121 14:05:27.094235 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:05:27 crc kubenswrapper[4774]: E1121 14:05:27.094382 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:05:29 crc kubenswrapper[4774]: I1121 14:05:29.092231 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:05:29 crc kubenswrapper[4774]: E1121 14:05:29.092909 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:05:29 crc kubenswrapper[4774]: I1121 14:05:29.092261 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:05:29 crc kubenswrapper[4774]: E1121 14:05:29.093192 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:05:29 crc kubenswrapper[4774]: I1121 14:05:29.092230 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:05:29 crc kubenswrapper[4774]: E1121 14:05:29.093449 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:05:29 crc kubenswrapper[4774]: I1121 14:05:29.092334 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:05:29 crc kubenswrapper[4774]: E1121 14:05:29.093690 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:05:30 crc kubenswrapper[4774]: E1121 14:05:30.071615 4774 kubelet_node_status.go:497] "Node not becoming ready in time after startup"
Nov 21 14:05:30 crc kubenswrapper[4774]: E1121 14:05:30.195873 4774 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Nov 21 14:05:31 crc kubenswrapper[4774]: I1121 14:05:31.093084 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:05:31 crc kubenswrapper[4774]: I1121 14:05:31.093225 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:05:31 crc kubenswrapper[4774]: I1121 14:05:31.093263 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:05:31 crc kubenswrapper[4774]: I1121 14:05:31.093494 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:05:31 crc kubenswrapper[4774]: E1121 14:05:31.093616 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:05:31 crc kubenswrapper[4774]: E1121 14:05:31.093481 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:05:31 crc kubenswrapper[4774]: E1121 14:05:31.093953 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:05:31 crc kubenswrapper[4774]: E1121 14:05:31.094086 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:05:33 crc kubenswrapper[4774]: I1121 14:05:33.092610 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:05:33 crc kubenswrapper[4774]: I1121 14:05:33.092703 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:05:33 crc kubenswrapper[4774]: E1121 14:05:33.092913 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:05:33 crc kubenswrapper[4774]: I1121 14:05:33.092987 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:05:33 crc kubenswrapper[4774]: E1121 14:05:33.093161 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:05:33 crc kubenswrapper[4774]: E1121 14:05:33.093315 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:05:33 crc kubenswrapper[4774]: I1121 14:05:33.094269 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:05:33 crc kubenswrapper[4774]: E1121 14:05:33.094690 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 21 14:05:35 crc kubenswrapper[4774]: I1121 14:05:35.093037 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 21 14:05:35 crc kubenswrapper[4774]: E1121 14:05:35.094055 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 21 14:05:35 crc kubenswrapper[4774]: I1121 14:05:35.093157 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:05:35 crc kubenswrapper[4774]: I1121 14:05:35.093205 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 21 14:05:35 crc kubenswrapper[4774]: I1121 14:05:35.093155 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 21 14:05:35 crc kubenswrapper[4774]: E1121 14:05:35.094156 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03"
Nov 21 14:05:35 crc kubenswrapper[4774]: E1121 14:05:35.094292 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 21 14:05:35 crc kubenswrapper[4774]: E1121 14:05:35.094531 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:35 crc kubenswrapper[4774]: E1121 14:05:35.198054 4774 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 21 14:05:37 crc kubenswrapper[4774]: I1121 14:05:37.092309 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:37 crc kubenswrapper[4774]: I1121 14:05:37.092389 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:37 crc kubenswrapper[4774]: I1121 14:05:37.092416 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:37 crc kubenswrapper[4774]: I1121 14:05:37.092534 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:37 crc kubenswrapper[4774]: E1121 14:05:37.092713 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:37 crc kubenswrapper[4774]: E1121 14:05:37.093053 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:05:37 crc kubenswrapper[4774]: E1121 14:05:37.093083 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:37 crc kubenswrapper[4774]: E1121 14:05:37.093147 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:05:39 crc kubenswrapper[4774]: I1121 14:05:39.092617 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:39 crc kubenswrapper[4774]: I1121 14:05:39.092650 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:39 crc kubenswrapper[4774]: I1121 14:05:39.092652 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:39 crc kubenswrapper[4774]: I1121 14:05:39.092698 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:39 crc kubenswrapper[4774]: E1121 14:05:39.092773 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:05:39 crc kubenswrapper[4774]: E1121 14:05:39.092907 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:39 crc kubenswrapper[4774]: E1121 14:05:39.092995 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:39 crc kubenswrapper[4774]: E1121 14:05:39.093117 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:05:39 crc kubenswrapper[4774]: I1121 14:05:39.093282 4774 scope.go:117] "RemoveContainer" containerID="2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8" Nov 21 14:05:39 crc kubenswrapper[4774]: I1121 14:05:39.776489 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hdxzw_0bf8b868-6e71-4073-a9ad-e2ac8ae15215/kube-multus/1.log" Nov 21 14:05:39 crc kubenswrapper[4774]: I1121 14:05:39.776538 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hdxzw" event={"ID":"0bf8b868-6e71-4073-a9ad-e2ac8ae15215","Type":"ContainerStarted","Data":"1c2299fa29bd8573db83cc8a8f6870ad02177ed430bfa9b38d82eda3dc9213d0"} Nov 21 14:05:40 crc kubenswrapper[4774]: E1121 14:05:40.198752 4774 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 21 14:05:41 crc kubenswrapper[4774]: I1121 14:05:41.092124 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:41 crc kubenswrapper[4774]: I1121 14:05:41.092233 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:41 crc kubenswrapper[4774]: I1121 14:05:41.092153 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:41 crc kubenswrapper[4774]: I1121 14:05:41.092124 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:41 crc kubenswrapper[4774]: E1121 14:05:41.092341 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:05:41 crc kubenswrapper[4774]: E1121 14:05:41.092404 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:05:41 crc kubenswrapper[4774]: E1121 14:05:41.092452 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:41 crc kubenswrapper[4774]: E1121 14:05:41.092955 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:41 crc kubenswrapper[4774]: I1121 14:05:41.093202 4774 scope.go:117] "RemoveContainer" containerID="c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1" Nov 21 14:05:41 crc kubenswrapper[4774]: I1121 14:05:41.784379 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/3.log" Nov 21 14:05:41 crc kubenswrapper[4774]: I1121 14:05:41.786282 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerStarted","Data":"4311dc642eeb3b7afa452271bbe04109b434f64f9a16c57967c8f8c498472c42"} Nov 21 14:05:41 crc kubenswrapper[4774]: I1121 14:05:41.786975 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:05:41 crc kubenswrapper[4774]: I1121 14:05:41.897590 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podStartSLOduration=110.89757082 podStartE2EDuration="1m50.89757082s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:41.81789804 +0000 UTC m=+132.470097299" watchObservedRunningTime="2025-11-21 14:05:41.89757082 +0000 UTC m=+132.549770079" Nov 21 14:05:41 crc kubenswrapper[4774]: I1121 14:05:41.898442 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-44mbn"] Nov 21 14:05:41 crc kubenswrapper[4774]: I1121 14:05:41.898533 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:41 crc kubenswrapper[4774]: E1121 14:05:41.898618 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:43 crc kubenswrapper[4774]: I1121 14:05:43.092099 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:43 crc kubenswrapper[4774]: I1121 14:05:43.092152 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:43 crc kubenswrapper[4774]: E1121 14:05:43.092269 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:05:43 crc kubenswrapper[4774]: I1121 14:05:43.092341 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:43 crc kubenswrapper[4774]: I1121 14:05:43.092357 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:43 crc kubenswrapper[4774]: E1121 14:05:43.092444 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:43 crc kubenswrapper[4774]: E1121 14:05:43.092575 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:05:43 crc kubenswrapper[4774]: E1121 14:05:43.092708 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:45 crc kubenswrapper[4774]: I1121 14:05:45.093068 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:45 crc kubenswrapper[4774]: I1121 14:05:45.093103 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:45 crc kubenswrapper[4774]: I1121 14:05:45.093112 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:45 crc kubenswrapper[4774]: I1121 14:05:45.093153 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:45 crc kubenswrapper[4774]: E1121 14:05:45.093244 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 21 14:05:45 crc kubenswrapper[4774]: E1121 14:05:45.093399 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 21 14:05:45 crc kubenswrapper[4774]: E1121 14:05:45.093504 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-44mbn" podUID="0d294e10-6a0e-4871-871c-01fb8e7ead03" Nov 21 14:05:45 crc kubenswrapper[4774]: E1121 14:05:45.093562 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 21 14:05:47 crc kubenswrapper[4774]: I1121 14:05:47.092451 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:47 crc kubenswrapper[4774]: I1121 14:05:47.092445 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn" Nov 21 14:05:47 crc kubenswrapper[4774]: I1121 14:05:47.092993 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:47 crc kubenswrapper[4774]: I1121 14:05:47.093446 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:47 crc kubenswrapper[4774]: I1121 14:05:47.097903 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 21 14:05:47 crc kubenswrapper[4774]: I1121 14:05:47.097967 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Nov 21 14:05:47 crc kubenswrapper[4774]: I1121 14:05:47.097980 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 21 14:05:47 crc kubenswrapper[4774]: I1121 14:05:47.098189 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Nov 21 14:05:47 crc kubenswrapper[4774]: I1121 14:05:47.098244 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Nov 21 14:05:47 crc kubenswrapper[4774]: I1121 14:05:47.098464 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.275159 4774 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.313160 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-d965l"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.313630 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.314315 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.314886 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.315596 4774 reflector.go:561] object-"openshift-controller-manager"/"client-ca": failed to list *v1.ConfigMap: configmaps "client-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.315651 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"client-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"client-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.315863 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-8h89j"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.316465 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.316639 4774 reflector.go:561] object-"openshift-controller-manager"/"openshift-global-ca": failed to list *v1.ConfigMap: configmaps "openshift-global-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.316672 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-global-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-global-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.317394 4774 reflector.go:561] object-"openshift-controller-manager"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.317437 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc 
kubenswrapper[4774]: W1121 14:05:54.317518 4774 reflector.go:561] object-"openshift-controller-manager"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.317541 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.317690 4774 reflector.go:561] object-"openshift-controller-manager"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.317728 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.317809 4774 reflector.go:561] object-"openshift-controller-manager"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.317876 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.317936 4774 reflector.go:561] object-"openshift-cluster-machine-approver"/"machine-approver-tls": failed to list *v1.Secret: secrets "machine-approver-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.317957 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"machine-approver-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-approver-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 
14:05:54.318009 4774 reflector.go:561] object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.318019 4774 reflector.go:561] object-"openshift-machine-api"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.318028 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.318051 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.318098 4774 reflector.go:561] object-"openshift-cluster-machine-approver"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.318118 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.318135 4774 reflector.go:561] object-"openshift-cluster-machine-approver"/"machine-approver-config": failed to list *v1.ConfigMap: configmaps "machine-approver-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.318161 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"machine-approver-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"machine-approver-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between 
node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.318250 4774 reflector.go:561] object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c": failed to list *v1.Secret: secrets "openshift-controller-manager-sa-dockercfg-msq4c" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.318276 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-controller-manager-sa-dockercfg-msq4c\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-controller-manager-sa-dockercfg-msq4c\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.319138 4774 reflector.go:561] object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4": failed to list *v1.Secret: secrets "machine-approver-sa-dockercfg-nl2j4" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.319168 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"machine-approver-sa-dockercfg-nl2j4\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-approver-sa-dockercfg-nl2j4\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.319184 4774 reflector.go:561] object-"openshift-machine-api"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.319227 4774 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7": failed to list *v1.Secret: secrets "machine-api-operator-dockercfg-mfbb7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.319232 4774 reflector.go:561] object-"openshift-cluster-machine-approver"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.319242 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-dockercfg-mfbb7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-dockercfg-mfbb7\" is forbidden: User \"system:node:crc\" cannot list 
resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.319221 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.319271 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.319194 4774 reflector.go:561] object-"openshift-machine-api"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.319292 4774 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-tls": failed to list *v1.Secret: secrets "machine-api-operator-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: W1121 14:05:54.319289 4774 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-images": failed to list *v1.ConfigMap: configmaps "machine-api-operator-images" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.319308 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.319322 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-images\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"machine-api-operator-images\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: E1121 14:05:54.319301 4774 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps 
\"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.319510 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-zsxng"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.320368 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.322383 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.322623 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.322739 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.324442 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.328722 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-w7tjv"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.329113 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.330466 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-wgq25"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.332981 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s665q"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.335595 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.336808 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.344685 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.344988 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.345066 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.345216 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.345314 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.345456 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.345599 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.345962 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.346108 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.346169 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.346860 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.347024 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.347208 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.347380 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.347622 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.348121 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.348254 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.348376 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.348582 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.348803 4774 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"etcd-serving-ca" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.356030 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.360784 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.361487 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.371478 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.374738 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.377270 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.388550 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.388740 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.388961 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.389655 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.389905 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.393981 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.394548 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hb96t"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.394604 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.395163 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-4rc7m"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.395235 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.395684 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.395847 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-hb96t" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.396434 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.396471 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-4rc7m" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.404854 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.405225 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.405289 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.405474 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.405494 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.404925 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.405054 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.405117 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.405149 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.405193 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.405189 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.404860 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.405651 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.407224 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.407502 4774 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.407668 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.409496 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-nmdw5"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.409839 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-8h89j"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.409857 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-zsxng"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.409870 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pgwc4"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.410045 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.410042 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.412678 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.412700 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.413748 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.413929 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.413983 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.414194 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.414270 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.414418 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.414486 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.414558 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.414655 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.414800 4774 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"etcd-serving-ca" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.414930 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.415391 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.415656 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.415804 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.416359 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.416532 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.419093 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.420203 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.420536 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.420870 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.425307 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.426405 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.427608 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.428310 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.435233 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.435656 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.435891 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.436672 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.437808 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.438888 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.439084 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.439309 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.439732 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.439878 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.439756 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.440215 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.449914 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.450175 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.455786 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d40ed0ef-54d5-4a6e-abdf-117f35add216-config\") pod \"console-operator-58897d9998-s665q\" (UID: \"d40ed0ef-54d5-4a6e-abdf-117f35add216\") " pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.455906 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c0aad507-e343-416e-b043-2a9af1baa0c7-machine-approver-tls\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.455949 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9lx6\" (UniqueName: \"kubernetes.io/projected/d40ed0ef-54d5-4a6e-abdf-117f35add216-kube-api-access-v9lx6\") pod \"console-operator-58897d9998-s665q\" (UID: \"d40ed0ef-54d5-4a6e-abdf-117f35add216\") " pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.455977 4774 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456013 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-audit-dir\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456051 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-trusted-ca-bundle\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456087 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvcpd\" (UniqueName: \"kubernetes.io/projected/565e5abf-8d99-4427-a923-0270e2080164-kube-api-access-jvcpd\") pod \"openshift-config-operator-7777fb866f-zsxng\" (UID: \"565e5abf-8d99-4427-a923-0270e2080164\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456122 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-config\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456153 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-config\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456183 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/565e5abf-8d99-4427-a923-0270e2080164-available-featuregates\") pod \"openshift-config-operator-7777fb866f-zsxng\" (UID: \"565e5abf-8d99-4427-a923-0270e2080164\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456212 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-config\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456245 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-client-ca\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456272 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r87jw\" (UniqueName: \"kubernetes.io/projected/6ded279c-1738-42b5-8828-e4883c3756bf-kube-api-access-r87jw\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456302 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b94e7447-7c8a-4f4e-9507-689f1500605c-console-oauth-config\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456331 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-trusted-ca-bundle\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456361 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d40ed0ef-54d5-4a6e-abdf-117f35add216-serving-cert\") pod \"console-operator-58897d9998-s665q\" (UID: \"d40ed0ef-54d5-4a6e-abdf-117f35add216\") " pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456387 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-encryption-config\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456417 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf2hp\" (UniqueName: \"kubernetes.io/projected/b94e7447-7c8a-4f4e-9507-689f1500605c-kube-api-access-bf2hp\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456446 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-etcd-client\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456475 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6ded279c-1738-42b5-8828-e4883c3756bf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: 
\"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456501 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-audit\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456533 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph6pt\" (UniqueName: \"kubernetes.io/projected/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-kube-api-access-ph6pt\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456563 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-image-import-ca\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456608 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-oauth-serving-cert\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456636 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-etcd-serving-ca\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456679 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-console-config\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456713 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4252b250-3577-4347-a8f6-0da47a0da0b7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-skbsp\" (UID: \"4252b250-3577-4347-a8f6-0da47a0da0b7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456744 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-node-pullsecrets\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456772 4774 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d40ed0ef-54d5-4a6e-abdf-117f35add216-trusted-ca\") pod \"console-operator-58897d9998-s665q\" (UID: \"d40ed0ef-54d5-4a6e-abdf-117f35add216\") " pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456805 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/516ab72d-be26-41a3-8f34-2fce0bf4febb-serving-cert\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456850 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0aad507-e343-416e-b043-2a9af1baa0c7-config\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456886 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4252b250-3577-4347-a8f6-0da47a0da0b7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-skbsp\" (UID: \"4252b250-3577-4347-a8f6-0da47a0da0b7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456933 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbzhp\" (UniqueName: \"kubernetes.io/projected/516ab72d-be26-41a3-8f34-2fce0bf4febb-kube-api-access-zbzhp\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456968 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-images\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.456998 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b94e7447-7c8a-4f4e-9507-689f1500605c-console-serving-cert\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.457025 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-serving-cert\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.457056 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-f9m2p\" (UniqueName: \"kubernetes.io/projected/c0aad507-e343-416e-b043-2a9af1baa0c7-kube-api-access-f9m2p\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.457101 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/565e5abf-8d99-4427-a923-0270e2080164-serving-cert\") pod \"openshift-config-operator-7777fb866f-zsxng\" (UID: \"565e5abf-8d99-4427-a923-0270e2080164\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.457129 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-275x6\" (UniqueName: \"kubernetes.io/projected/4252b250-3577-4347-a8f6-0da47a0da0b7-kube-api-access-275x6\") pod \"openshift-controller-manager-operator-756b6f6bc6-skbsp\" (UID: \"4252b250-3577-4347-a8f6-0da47a0da0b7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.457327 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-service-ca\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.457375 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c0aad507-e343-416e-b043-2a9af1baa0c7-auth-proxy-config\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.460764 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.463700 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.464435 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-rmzf9"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.465797 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-rmzf9" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.476720 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.477839 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.481017 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.481212 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.482077 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.482421 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.484118 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.484524 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6txj8"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.485122 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6txj8" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.485776 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.486598 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.488652 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.489679 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.489775 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.490730 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.490926 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-gfhfj"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.491472 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.491912 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gdvrm"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.492575 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.496083 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.497650 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.500223 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.500606 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.500925 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.506679 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.507527 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.508595 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mcznv"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.509407 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.509629 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.510791 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.511275 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.512131 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.512897 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-79zhf"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.513717 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-79zhf" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.514004 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-w7tjv"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.515339 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.515531 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.516128 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.516658 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gsdtl"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.517081 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.518302 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s665q"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.520163 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-bjlhz"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.521062 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-bjlhz" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.521357 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-7qsfc"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.522003 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-7qsfc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.522900 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-4rc7m"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.523900 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.525052 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-wgq25"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.527653 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hb96t"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.529252 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.532497 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.533035 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.534172 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.536160 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.536215 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.538604 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-d965l"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.539609 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.551319 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.551378 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.560094 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6ded279c-1738-42b5-8828-e4883c3756bf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.560244 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-audit\") pod \"apiserver-76f77b778f-wgq25\" (UID: 
\"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.560742 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbdmq\" (UniqueName: \"kubernetes.io/projected/a518d994-29bd-43a1-9dcb-870dd7d0ecdf-kube-api-access-sbdmq\") pod \"ingress-operator-5b745b69d9-9mrfc\" (UID: \"a518d994-29bd-43a1-9dcb-870dd7d0ecdf\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.560779 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph6pt\" (UniqueName: \"kubernetes.io/projected/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-kube-api-access-ph6pt\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.560866 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-image-import-ca\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.560901 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99ffd227-1170-4aa8-8232-519d2b605f26-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-gmzc4\" (UID: \"99ffd227-1170-4aa8-8232-519d2b605f26\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.560927 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4lb7\" (UniqueName: \"kubernetes.io/projected/68d64bbb-e62d-4d42-b89c-3e84d14a0d27-kube-api-access-c4lb7\") pod \"cluster-samples-operator-665b6dd947-mx8jr\" (UID: \"68d64bbb-e62d-4d42-b89c-3e84d14a0d27\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.560980 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-audit-dir\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561012 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a6c40568-f5bb-48c3-bc00-a5b78c663270-metrics-tls\") pod \"dns-operator-744455d44c-hb96t\" (UID: \"a6c40568-f5bb-48c3-bc00-a5b78c663270\") " pod="openshift-dns-operator/dns-operator-744455d44c-hb96t" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561110 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-oauth-serving-cert\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc 
kubenswrapper[4774]: I1121 14:05:54.561160 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-etcd-serving-ca\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561193 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-console-config\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561323 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4252b250-3577-4347-a8f6-0da47a0da0b7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-skbsp\" (UID: \"4252b250-3577-4347-a8f6-0da47a0da0b7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561399 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-node-pullsecrets\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561447 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d40ed0ef-54d5-4a6e-abdf-117f35add216-trusted-ca\") pod \"console-operator-58897d9998-s665q\" (UID: \"d40ed0ef-54d5-4a6e-abdf-117f35add216\") " pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561491 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-etcd-client\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561539 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/516ab72d-be26-41a3-8f34-2fce0bf4febb-serving-cert\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561583 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grj4w\" (UniqueName: \"kubernetes.io/projected/cf55b393-ef41-4c5f-94d8-f3e829eca612-kube-api-access-grj4w\") pod \"cluster-image-registry-operator-dc59b4c8b-mkldg\" (UID: \"cf55b393-ef41-4c5f-94d8-f3e829eca612\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561627 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/99ffd227-1170-4aa8-8232-519d2b605f26-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-gmzc4\" (UID: \"99ffd227-1170-4aa8-8232-519d2b605f26\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561681 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4252b250-3577-4347-a8f6-0da47a0da0b7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-skbsp\" (UID: \"4252b250-3577-4347-a8f6-0da47a0da0b7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561722 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0aad507-e343-416e-b043-2a9af1baa0c7-config\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561764 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffd390c3-23b8-4d65-a346-47b2ccb6b917-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9dv6b\" (UID: \"ffd390c3-23b8-4d65-a346-47b2ccb6b917\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561848 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-audit\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561889 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a518d994-29bd-43a1-9dcb-870dd7d0ecdf-trusted-ca\") pod \"ingress-operator-5b745b69d9-9mrfc\" (UID: \"a518d994-29bd-43a1-9dcb-870dd7d0ecdf\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.561969 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/86527758-2544-4192-8e14-64e1194a024e-etcd-service-ca\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.562527 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/68d64bbb-e62d-4d42-b89c-3e84d14a0d27-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mx8jr\" (UID: \"68d64bbb-e62d-4d42-b89c-3e84d14a0d27\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.562548 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-console-config\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.562607 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-images\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.562654 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b94e7447-7c8a-4f4e-9507-689f1500605c-console-serving-cert\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.562841 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbzhp\" (UniqueName: \"kubernetes.io/projected/516ab72d-be26-41a3-8f34-2fce0bf4febb-kube-api-access-zbzhp\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.564088 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.564141 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/86527758-2544-4192-8e14-64e1194a024e-etcd-client\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.564301 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/565e5abf-8d99-4427-a923-0270e2080164-serving-cert\") pod \"openshift-config-operator-7777fb866f-zsxng\" (UID: \"565e5abf-8d99-4427-a923-0270e2080164\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.564372 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-275x6\" (UniqueName: \"kubernetes.io/projected/4252b250-3577-4347-a8f6-0da47a0da0b7-kube-api-access-275x6\") pod \"openshift-controller-manager-operator-756b6f6bc6-skbsp\" (UID: \"4252b250-3577-4347-a8f6-0da47a0da0b7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.564420 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-serving-cert\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " 
pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.564424 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4252b250-3577-4347-a8f6-0da47a0da0b7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-skbsp\" (UID: \"4252b250-3577-4347-a8f6-0da47a0da0b7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.562193 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.564886 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9m2p\" (UniqueName: \"kubernetes.io/projected/c0aad507-e343-416e-b043-2a9af1baa0c7-kube-api-access-f9m2p\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.564928 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86527758-2544-4192-8e14-64e1194a024e-serving-cert\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.564301 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-oauth-serving-cert\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.564951 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cf55b393-ef41-4c5f-94d8-f3e829eca612-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-mkldg\" (UID: \"cf55b393-ef41-4c5f-94d8-f3e829eca612\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.566255 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-image-import-ca\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.566321 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-service-ca\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.565134 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-node-pullsecrets\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " 
pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.566937 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c0aad507-e343-416e-b043-2a9af1baa0c7-auth-proxy-config\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.567000 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ffd390c3-23b8-4d65-a346-47b2ccb6b917-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9dv6b\" (UID: \"ffd390c3-23b8-4d65-a346-47b2ccb6b917\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.567386 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-etcd-serving-ca\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.567957 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-service-ca\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.567155 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a518d994-29bd-43a1-9dcb-870dd7d0ecdf-bound-sa-token\") pod \"ingress-operator-5b745b69d9-9mrfc\" (UID: \"a518d994-29bd-43a1-9dcb-870dd7d0ecdf\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.568732 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d40ed0ef-54d5-4a6e-abdf-117f35add216-trusted-ca\") pod \"console-operator-58897d9998-s665q\" (UID: \"d40ed0ef-54d5-4a6e-abdf-117f35add216\") " pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.568558 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571394 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-serving-cert\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571497 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" 
(UniqueName: \"kubernetes.io/secret/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-encryption-config\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571541 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b94e7447-7c8a-4f4e-9507-689f1500605c-console-serving-cert\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571554 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d40ed0ef-54d5-4a6e-abdf-117f35add216-config\") pod \"console-operator-58897d9998-s665q\" (UID: \"d40ed0ef-54d5-4a6e-abdf-117f35add216\") " pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571589 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c0aad507-e343-416e-b043-2a9af1baa0c7-machine-approver-tls\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571674 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/223a69f9-6da6-49f6-8dc6-791fdb76a205-serving-cert\") pod \"route-controller-manager-6576b87f9c-gtfqp\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571699 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9lx6\" (UniqueName: \"kubernetes.io/projected/d40ed0ef-54d5-4a6e-abdf-117f35add216-kube-api-access-v9lx6\") pod \"console-operator-58897d9998-s665q\" (UID: \"d40ed0ef-54d5-4a6e-abdf-117f35add216\") " pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571725 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571761 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a518d994-29bd-43a1-9dcb-870dd7d0ecdf-metrics-tls\") pod \"ingress-operator-5b745b69d9-9mrfc\" (UID: \"a518d994-29bd-43a1-9dcb-870dd7d0ecdf\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571786 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86527758-2544-4192-8e14-64e1194a024e-config\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571810 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-audit-dir\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571854 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kg9b\" (UniqueName: \"kubernetes.io/projected/86527758-2544-4192-8e14-64e1194a024e-kube-api-access-7kg9b\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.571877 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfqpj\" (UniqueName: \"kubernetes.io/projected/a6c40568-f5bb-48c3-bc00-a5b78c663270-kube-api-access-gfqpj\") pod \"dns-operator-744455d44c-hb96t\" (UID: \"a6c40568-f5bb-48c3-bc00-a5b78c663270\") " pod="openshift-dns-operator/dns-operator-744455d44c-hb96t" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.572173 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-trusted-ca-bundle\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.572463 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d40ed0ef-54d5-4a6e-abdf-117f35add216-config\") pod \"console-operator-58897d9998-s665q\" (UID: \"d40ed0ef-54d5-4a6e-abdf-117f35add216\") " pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.572521 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/223a69f9-6da6-49f6-8dc6-791fdb76a205-client-ca\") pod \"route-controller-manager-6576b87f9c-gtfqp\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.572572 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffd390c3-23b8-4d65-a346-47b2ccb6b917-config\") pod \"kube-controller-manager-operator-78b949d7b-9dv6b\" (UID: \"ffd390c3-23b8-4d65-a346-47b2ccb6b917\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.572608 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghdt4\" (UniqueName: \"kubernetes.io/projected/223a69f9-6da6-49f6-8dc6-791fdb76a205-kube-api-access-ghdt4\") pod \"route-controller-manager-6576b87f9c-gtfqp\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.572634 
4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qknw\" (UniqueName: \"kubernetes.io/projected/1c533952-b089-4c49-b4dc-a969c08022b9-kube-api-access-4qknw\") pod \"downloads-7954f5f757-4rc7m\" (UID: \"1c533952-b089-4c49-b4dc-a969c08022b9\") " pod="openshift-console/downloads-7954f5f757-4rc7m" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.572706 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvcpd\" (UniqueName: \"kubernetes.io/projected/565e5abf-8d99-4427-a923-0270e2080164-kube-api-access-jvcpd\") pod \"openshift-config-operator-7777fb866f-zsxng\" (UID: \"565e5abf-8d99-4427-a923-0270e2080164\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.572751 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e451f31-029f-4072-847d-4ac2d4452ece-config\") pod \"openshift-apiserver-operator-796bbdcf4f-89bbs\" (UID: \"8e451f31-029f-4072-847d-4ac2d4452ece\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.572774 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/86527758-2544-4192-8e14-64e1194a024e-etcd-ca\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.572798 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/cf55b393-ef41-4c5f-94d8-f3e829eca612-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-mkldg\" (UID: \"cf55b393-ef41-4c5f-94d8-f3e829eca612\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.572978 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cf55b393-ef41-4c5f-94d8-f3e829eca612-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-mkldg\" (UID: \"cf55b393-ef41-4c5f-94d8-f3e829eca612\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573016 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-config\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573040 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-config\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573062 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-audit-policies\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573114 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js5pg\" (UniqueName: \"kubernetes.io/projected/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-kube-api-access-js5pg\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573137 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgt6h\" (UniqueName: \"kubernetes.io/projected/8e451f31-029f-4072-847d-4ac2d4452ece-kube-api-access-fgt6h\") pod \"openshift-apiserver-operator-796bbdcf4f-89bbs\" (UID: \"8e451f31-029f-4072-847d-4ac2d4452ece\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573432 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-trusted-ca-bundle\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573484 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-audit-dir\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573561 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/565e5abf-8d99-4427-a923-0270e2080164-available-featuregates\") pod \"openshift-config-operator-7777fb866f-zsxng\" (UID: \"565e5abf-8d99-4427-a923-0270e2080164\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573609 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-config\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573667 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-config\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573917 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/565e5abf-8d99-4427-a923-0270e2080164-available-featuregates\") pod \"openshift-config-operator-7777fb866f-zsxng\" (UID: \"565e5abf-8d99-4427-a923-0270e2080164\") " 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573963 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-client-ca\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.573985 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-serving-cert\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.574004 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r87jw\" (UniqueName: \"kubernetes.io/projected/6ded279c-1738-42b5-8828-e4883c3756bf-kube-api-access-r87jw\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.574021 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b94e7447-7c8a-4f4e-9507-689f1500605c-console-oauth-config\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.574067 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-trusted-ca-bundle\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.574083 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d40ed0ef-54d5-4a6e-abdf-117f35add216-serving-cert\") pod \"console-operator-58897d9998-s665q\" (UID: \"d40ed0ef-54d5-4a6e-abdf-117f35add216\") " pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.574201 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/223a69f9-6da6-49f6-8dc6-791fdb76a205-config\") pod \"route-controller-manager-6576b87f9c-gtfqp\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.574242 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf2hp\" (UniqueName: \"kubernetes.io/projected/b94e7447-7c8a-4f4e-9507-689f1500605c-kube-api-access-bf2hp\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.574347 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-etcd-client\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.574445 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-encryption-config\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.574609 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e451f31-029f-4072-847d-4ac2d4452ece-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-89bbs\" (UID: \"8e451f31-029f-4072-847d-4ac2d4452ece\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.574646 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tmwg\" (UniqueName: \"kubernetes.io/projected/99ffd227-1170-4aa8-8232-519d2b605f26-kube-api-access-9tmwg\") pod \"kube-storage-version-migrator-operator-b67b599dd-gmzc4\" (UID: \"99ffd227-1170-4aa8-8232-519d2b605f26\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.574676 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7npg2\" (UniqueName: \"kubernetes.io/projected/99f783ba-3348-491e-849d-51149e55f7cc-kube-api-access-7npg2\") pod \"migrator-59844c95c7-6txj8\" (UID: \"99f783ba-3348-491e-849d-51149e55f7cc\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6txj8" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.575761 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.576390 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-gfhfj"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.576513 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4252b250-3577-4347-a8f6-0da47a0da0b7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-skbsp\" (UID: \"4252b250-3577-4347-a8f6-0da47a0da0b7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.577021 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-trusted-ca-bundle\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.577062 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.577806 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.578309 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d40ed0ef-54d5-4a6e-abdf-117f35add216-serving-cert\") pod \"console-operator-58897d9998-s665q\" (UID: \"d40ed0ef-54d5-4a6e-abdf-117f35add216\") " pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.578878 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-nmdw5"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.579617 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-etcd-client\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.579891 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.580863 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-f9llk"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.582209 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-f9llk" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.582546 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-f9llk"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.584078 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-79zhf"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.586615 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6txj8"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.587634 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gdvrm"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.588613 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.589623 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.590261 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/565e5abf-8d99-4427-a923-0270e2080164-serving-cert\") pod \"openshift-config-operator-7777fb866f-zsxng\" (UID: \"565e5abf-8d99-4427-a923-0270e2080164\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.591067 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.591453 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/b94e7447-7c8a-4f4e-9507-689f1500605c-console-oauth-config\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.592222 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.593421 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.595281 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.596641 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pgwc4"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.597274 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.598306 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mcznv"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.598343 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-encryption-config\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.599364 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.600520 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gsdtl"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.601720 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-bjlhz"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.602791 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.604209 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-zpzzf"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.605669 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-zpzzf"] Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.605803 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.616187 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.635495 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.655626 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675431 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grj4w\" (UniqueName: \"kubernetes.io/projected/cf55b393-ef41-4c5f-94d8-f3e829eca612-kube-api-access-grj4w\") pod \"cluster-image-registry-operator-dc59b4c8b-mkldg\" (UID: \"cf55b393-ef41-4c5f-94d8-f3e829eca612\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675479 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99ffd227-1170-4aa8-8232-519d2b605f26-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-gmzc4\" (UID: \"99ffd227-1170-4aa8-8232-519d2b605f26\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675519 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffd390c3-23b8-4d65-a346-47b2ccb6b917-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9dv6b\" (UID: \"ffd390c3-23b8-4d65-a346-47b2ccb6b917\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675556 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a518d994-29bd-43a1-9dcb-870dd7d0ecdf-trusted-ca\") pod \"ingress-operator-5b745b69d9-9mrfc\" (UID: \"a518d994-29bd-43a1-9dcb-870dd7d0ecdf\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675577 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/86527758-2544-4192-8e14-64e1194a024e-etcd-service-ca\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675599 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/68d64bbb-e62d-4d42-b89c-3e84d14a0d27-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mx8jr\" (UID: \"68d64bbb-e62d-4d42-b89c-3e84d14a0d27\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675637 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-44rg5\" 
(UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675658 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/86527758-2544-4192-8e14-64e1194a024e-etcd-client\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675715 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86527758-2544-4192-8e14-64e1194a024e-serving-cert\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675740 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cf55b393-ef41-4c5f-94d8-f3e829eca612-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-mkldg\" (UID: \"cf55b393-ef41-4c5f-94d8-f3e829eca612\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675788 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ffd390c3-23b8-4d65-a346-47b2ccb6b917-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9dv6b\" (UID: \"ffd390c3-23b8-4d65-a346-47b2ccb6b917\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675864 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a518d994-29bd-43a1-9dcb-870dd7d0ecdf-bound-sa-token\") pod \"ingress-operator-5b745b69d9-9mrfc\" (UID: \"a518d994-29bd-43a1-9dcb-870dd7d0ecdf\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.675891 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676370 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676500 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676648 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-encryption-config\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676689 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/223a69f9-6da6-49f6-8dc6-791fdb76a205-serving-cert\") pod \"route-controller-manager-6576b87f9c-gtfqp\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676733 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a518d994-29bd-43a1-9dcb-870dd7d0ecdf-metrics-tls\") pod \"ingress-operator-5b745b69d9-9mrfc\" (UID: \"a518d994-29bd-43a1-9dcb-870dd7d0ecdf\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676763 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86527758-2544-4192-8e14-64e1194a024e-config\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676788 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kg9b\" (UniqueName: \"kubernetes.io/projected/86527758-2544-4192-8e14-64e1194a024e-kube-api-access-7kg9b\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676813 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfqpj\" (UniqueName: \"kubernetes.io/projected/a6c40568-f5bb-48c3-bc00-a5b78c663270-kube-api-access-gfqpj\") pod \"dns-operator-744455d44c-hb96t\" (UID: \"a6c40568-f5bb-48c3-bc00-a5b78c663270\") " pod="openshift-dns-operator/dns-operator-744455d44c-hb96t" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676912 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/223a69f9-6da6-49f6-8dc6-791fdb76a205-client-ca\") pod \"route-controller-manager-6576b87f9c-gtfqp\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676937 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffd390c3-23b8-4d65-a346-47b2ccb6b917-config\") pod \"kube-controller-manager-operator-78b949d7b-9dv6b\" (UID: \"ffd390c3-23b8-4d65-a346-47b2ccb6b917\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676966 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghdt4\" (UniqueName: \"kubernetes.io/projected/223a69f9-6da6-49f6-8dc6-791fdb76a205-kube-api-access-ghdt4\") pod \"route-controller-manager-6576b87f9c-gtfqp\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676988 4774 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/86527758-2544-4192-8e14-64e1194a024e-etcd-service-ca\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.676990 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qknw\" (UniqueName: \"kubernetes.io/projected/1c533952-b089-4c49-b4dc-a969c08022b9-kube-api-access-4qknw\") pod \"downloads-7954f5f757-4rc7m\" (UID: \"1c533952-b089-4c49-b4dc-a969c08022b9\") " pod="openshift-console/downloads-7954f5f757-4rc7m" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677046 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e451f31-029f-4072-847d-4ac2d4452ece-config\") pod \"openshift-apiserver-operator-796bbdcf4f-89bbs\" (UID: \"8e451f31-029f-4072-847d-4ac2d4452ece\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677063 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/86527758-2544-4192-8e14-64e1194a024e-etcd-ca\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677085 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/cf55b393-ef41-4c5f-94d8-f3e829eca612-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-mkldg\" (UID: \"cf55b393-ef41-4c5f-94d8-f3e829eca612\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677105 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cf55b393-ef41-4c5f-94d8-f3e829eca612-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-mkldg\" (UID: \"cf55b393-ef41-4c5f-94d8-f3e829eca612\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677128 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-audit-policies\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677143 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js5pg\" (UniqueName: \"kubernetes.io/projected/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-kube-api-access-js5pg\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677159 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgt6h\" (UniqueName: \"kubernetes.io/projected/8e451f31-029f-4072-847d-4ac2d4452ece-kube-api-access-fgt6h\") pod 
\"openshift-apiserver-operator-796bbdcf4f-89bbs\" (UID: \"8e451f31-029f-4072-847d-4ac2d4452ece\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677191 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-serving-cert\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677221 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/223a69f9-6da6-49f6-8dc6-791fdb76a205-config\") pod \"route-controller-manager-6576b87f9c-gtfqp\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677247 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e451f31-029f-4072-847d-4ac2d4452ece-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-89bbs\" (UID: \"8e451f31-029f-4072-847d-4ac2d4452ece\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677270 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tmwg\" (UniqueName: \"kubernetes.io/projected/99ffd227-1170-4aa8-8232-519d2b605f26-kube-api-access-9tmwg\") pod \"kube-storage-version-migrator-operator-b67b599dd-gmzc4\" (UID: \"99ffd227-1170-4aa8-8232-519d2b605f26\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677289 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7npg2\" (UniqueName: \"kubernetes.io/projected/99f783ba-3348-491e-849d-51149e55f7cc-kube-api-access-7npg2\") pod \"migrator-59844c95c7-6txj8\" (UID: \"99f783ba-3348-491e-849d-51149e55f7cc\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6txj8" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677313 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbdmq\" (UniqueName: \"kubernetes.io/projected/a518d994-29bd-43a1-9dcb-870dd7d0ecdf-kube-api-access-sbdmq\") pod \"ingress-operator-5b745b69d9-9mrfc\" (UID: \"a518d994-29bd-43a1-9dcb-870dd7d0ecdf\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677509 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99ffd227-1170-4aa8-8232-519d2b605f26-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-gmzc4\" (UID: \"99ffd227-1170-4aa8-8232-519d2b605f26\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677526 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4lb7\" (UniqueName: \"kubernetes.io/projected/68d64bbb-e62d-4d42-b89c-3e84d14a0d27-kube-api-access-c4lb7\") pod 
\"cluster-samples-operator-665b6dd947-mx8jr\" (UID: \"68d64bbb-e62d-4d42-b89c-3e84d14a0d27\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.677781 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86527758-2544-4192-8e14-64e1194a024e-config\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.678132 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/223a69f9-6da6-49f6-8dc6-791fdb76a205-client-ca\") pod \"route-controller-manager-6576b87f9c-gtfqp\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.678395 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cf55b393-ef41-4c5f-94d8-f3e829eca612-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-mkldg\" (UID: \"cf55b393-ef41-4c5f-94d8-f3e829eca612\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.678550 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ffd390c3-23b8-4d65-a346-47b2ccb6b917-config\") pod \"kube-controller-manager-operator-78b949d7b-9dv6b\" (UID: \"ffd390c3-23b8-4d65-a346-47b2ccb6b917\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.678784 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e451f31-029f-4072-847d-4ac2d4452ece-config\") pod \"openshift-apiserver-operator-796bbdcf4f-89bbs\" (UID: \"8e451f31-029f-4072-847d-4ac2d4452ece\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.678861 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.678888 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-audit-dir\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.678913 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a6c40568-f5bb-48c3-bc00-a5b78c663270-metrics-tls\") pod \"dns-operator-744455d44c-hb96t\" (UID: \"a6c40568-f5bb-48c3-bc00-a5b78c663270\") " pod="openshift-dns-operator/dns-operator-744455d44c-hb96t" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.678950 4774 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-audit-policies\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.678960 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-audit-dir\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.679039 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-etcd-client\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.679374 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/68d64bbb-e62d-4d42-b89c-3e84d14a0d27-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-mx8jr\" (UID: \"68d64bbb-e62d-4d42-b89c-3e84d14a0d27\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.679397 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/86527758-2544-4192-8e14-64e1194a024e-etcd-ca\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.679544 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/86527758-2544-4192-8e14-64e1194a024e-etcd-client\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.679899 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-encryption-config\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.680422 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/223a69f9-6da6-49f6-8dc6-791fdb76a205-config\") pod \"route-controller-manager-6576b87f9c-gtfqp\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.680465 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e451f31-029f-4072-847d-4ac2d4452ece-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-89bbs\" (UID: \"8e451f31-029f-4072-847d-4ac2d4452ece\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" Nov 21 14:05:54 crc 
kubenswrapper[4774]: I1121 14:05:54.681810 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-etcd-client\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.682528 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/223a69f9-6da6-49f6-8dc6-791fdb76a205-serving-cert\") pod \"route-controller-manager-6576b87f9c-gtfqp\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.683178 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86527758-2544-4192-8e14-64e1194a024e-serving-cert\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.683200 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/cf55b393-ef41-4c5f-94d8-f3e829eca612-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-mkldg\" (UID: \"cf55b393-ef41-4c5f-94d8-f3e829eca612\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.683522 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-serving-cert\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.683797 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a6c40568-f5bb-48c3-bc00-a5b78c663270-metrics-tls\") pod \"dns-operator-744455d44c-hb96t\" (UID: \"a6c40568-f5bb-48c3-bc00-a5b78c663270\") " pod="openshift-dns-operator/dns-operator-744455d44c-hb96t" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.689520 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ffd390c3-23b8-4d65-a346-47b2ccb6b917-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9dv6b\" (UID: \"ffd390c3-23b8-4d65-a346-47b2ccb6b917\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.698505 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.716927 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.737022 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.756569 4774 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-operator"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.776364 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.795535 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.800845 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a518d994-29bd-43a1-9dcb-870dd7d0ecdf-metrics-tls\") pod \"ingress-operator-5b745b69d9-9mrfc\" (UID: \"a518d994-29bd-43a1-9dcb-870dd7d0ecdf\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.816709 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.842701 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.848387 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a518d994-29bd-43a1-9dcb-870dd7d0ecdf-trusted-ca\") pod \"ingress-operator-5b745b69d9-9mrfc\" (UID: \"a518d994-29bd-43a1-9dcb-870dd7d0ecdf\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.876427 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.895652 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.915727 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.935436 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.956398 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.976152 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.979901 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99ffd227-1170-4aa8-8232-519d2b605f26-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-gmzc4\" (UID: \"99ffd227-1170-4aa8-8232-519d2b605f26\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" Nov 21 14:05:54 crc kubenswrapper[4774]: I1121 14:05:54.997216 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.011131 4774 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99ffd227-1170-4aa8-8232-519d2b605f26-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-gmzc4\" (UID: \"99ffd227-1170-4aa8-8232-519d2b605f26\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.015844 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.036298 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.056222 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.076348 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.096038 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.116065 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.136178 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.157313 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.177447 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.217394 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.236120 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.256196 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.276189 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.296680 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.316229 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.336979 4774 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.356222 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.375759 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.395522 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.427687 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.436540 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.461357 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.482803 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.494833 4774 request.go:700] Waited for 1.002372694s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/secrets?fieldSelector=metadata.name%3Dv4-0-config-user-idp-0-file-data&limit=500&resourceVersion=0 Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.496077 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.516239 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.535940 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.556199 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.561104 4774 secret.go:188] Couldn't get secret openshift-machine-api/machine-api-operator-tls: failed to sync secret cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.561220 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6ded279c-1738-42b5-8828-e4883c3756bf-machine-api-operator-tls podName:6ded279c-1738-42b5-8828-e4883c3756bf nodeName:}" failed. No retries permitted until 2025-11-21 14:05:56.061188226 +0000 UTC m=+146.713387485 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "machine-api-operator-tls" (UniqueName: "kubernetes.io/secret/6ded279c-1738-42b5-8828-e4883c3756bf-machine-api-operator-tls") pod "machine-api-operator-5694c8668f-8h89j" (UID: "6ded279c-1738-42b5-8828-e4883c3756bf") : failed to sync secret cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.563354 4774 configmap.go:193] Couldn't get configMap openshift-cluster-machine-approver/machine-approver-config: failed to sync configmap cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.563490 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c0aad507-e343-416e-b043-2a9af1baa0c7-config podName:c0aad507-e343-416e-b043-2a9af1baa0c7 nodeName:}" failed. No retries permitted until 2025-11-21 14:05:56.063476051 +0000 UTC m=+146.715675310 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/c0aad507-e343-416e-b043-2a9af1baa0c7-config") pod "machine-approver-56656f9798-pbvh7" (UID: "c0aad507-e343-416e-b043-2a9af1baa0c7") : failed to sync configmap cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.566593 4774 configmap.go:193] Couldn't get configMap openshift-machine-api/machine-api-operator-images: failed to sync configmap cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.566660 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-images podName:6ded279c-1738-42b5-8828-e4883c3756bf nodeName:}" failed. No retries permitted until 2025-11-21 14:05:56.066645581 +0000 UTC m=+146.718844850 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "images" (UniqueName: "kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-images") pod "machine-api-operator-5694c8668f-8h89j" (UID: "6ded279c-1738-42b5-8828-e4883c3756bf") : failed to sync configmap cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.567686 4774 secret.go:188] Couldn't get secret openshift-controller-manager/serving-cert: failed to sync secret cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.567921 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/516ab72d-be26-41a3-8f34-2fce0bf4febb-serving-cert podName:516ab72d-be26-41a3-8f34-2fce0bf4febb nodeName:}" failed. No retries permitted until 2025-11-21 14:05:56.067878266 +0000 UTC m=+146.720077525 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/516ab72d-be26-41a3-8f34-2fce0bf4febb-serving-cert") pod "controller-manager-879f6c89f-d965l" (UID: "516ab72d-be26-41a3-8f34-2fce0bf4febb") : failed to sync secret cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.572611 4774 configmap.go:193] Couldn't get configMap openshift-controller-manager/openshift-global-ca: failed to sync configmap cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.572638 4774 secret.go:188] Couldn't get secret openshift-cluster-machine-approver/machine-approver-tls: failed to sync secret cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.572691 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c0aad507-e343-416e-b043-2a9af1baa0c7-machine-approver-tls podName:c0aad507-e343-416e-b043-2a9af1baa0c7 nodeName:}" failed. No retries permitted until 2025-11-21 14:05:56.072679383 +0000 UTC m=+146.724878642 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "machine-approver-tls" (UniqueName: "kubernetes.io/secret/c0aad507-e343-416e-b043-2a9af1baa0c7-machine-approver-tls") pod "machine-approver-56656f9798-pbvh7" (UID: "c0aad507-e343-416e-b043-2a9af1baa0c7") : failed to sync secret cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.572689 4774 configmap.go:193] Couldn't get configMap openshift-cluster-machine-approver/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.572722 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c0aad507-e343-416e-b043-2a9af1baa0c7-auth-proxy-config podName:c0aad507-e343-416e-b043-2a9af1baa0c7 nodeName:}" failed. No retries permitted until 2025-11-21 14:05:56.072715804 +0000 UTC m=+146.724915063 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "auth-proxy-config" (UniqueName: "kubernetes.io/configmap/c0aad507-e343-416e-b043-2a9af1baa0c7-auth-proxy-config") pod "machine-approver-56656f9798-pbvh7" (UID: "c0aad507-e343-416e-b043-2a9af1baa0c7") : failed to sync configmap cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.572784 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-proxy-ca-bundles podName:516ab72d-be26-41a3-8f34-2fce0bf4febb nodeName:}" failed. No retries permitted until 2025-11-21 14:05:56.072770605 +0000 UTC m=+146.724969864 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-ca-bundles" (UniqueName: "kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-proxy-ca-bundles") pod "controller-manager-879f6c89f-d965l" (UID: "516ab72d-be26-41a3-8f34-2fce0bf4febb") : failed to sync configmap cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.573910 4774 configmap.go:193] Couldn't get configMap openshift-machine-api/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.573997 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-config podName:6ded279c-1738-42b5-8828-e4883c3756bf nodeName:}" failed. 
Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.573997 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-config podName:6ded279c-1738-42b5-8828-e4883c3756bf nodeName:}" failed. No retries permitted until 2025-11-21 14:05:56.0739818 +0000 UTC m=+146.726181289 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-config") pod "machine-api-operator-5694c8668f-8h89j" (UID: "6ded279c-1738-42b5-8828-e4883c3756bf") : failed to sync configmap cache: timed out waiting for the condition
Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.573924 4774 configmap.go:193] Couldn't get configMap openshift-controller-manager/config: failed to sync configmap cache: timed out waiting for the condition
Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.574048 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-config podName:516ab72d-be26-41a3-8f34-2fce0bf4febb nodeName:}" failed. No retries permitted until 2025-11-21 14:05:56.074038521 +0000 UTC m=+146.726238020 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-config") pod "controller-manager-879f6c89f-d965l" (UID: "516ab72d-be26-41a3-8f34-2fce0bf4febb") : failed to sync configmap cache: timed out waiting for the condition
Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.575031 4774 configmap.go:193] Couldn't get configMap openshift-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition
Nov 21 14:05:55 crc kubenswrapper[4774]: E1121 14:05:55.575079 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-client-ca podName:516ab72d-be26-41a3-8f34-2fce0bf4febb nodeName:}" failed. No retries permitted until 2025-11-21 14:05:56.075070041 +0000 UTC m=+146.727269300 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-client-ca") pod "controller-manager-879f6c89f-d965l" (UID: "516ab72d-be26-41a3-8f34-2fce0bf4febb") : failed to sync configmap cache: timed out waiting for the condition
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.576012 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.596240 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.616075 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.636247 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.656919 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.675647 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.697736 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.716850 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.736993 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.756462 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.775973 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.795232 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.816452 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.835803 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.855759 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.876630 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.896795 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.916744 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.936466 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Nov 21 14:05:55 crc kubenswrapper[4774]: I1121 14:05:55.956225 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Nov 21 14:05:56 crc kubenswrapper[4774]: I1121 14:05:56.035859 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Nov 21 14:05:56 crc kubenswrapper[4774]: I1121 14:05:56.056251 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Nov 21 14:05:56 crc kubenswrapper[4774]: I1121 14:05:56.116076 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Nov 21 14:05:56 crc kubenswrapper[4774]: I1121 14:05:56.156191 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Nov 21 14:05:56 crc kubenswrapper[4774]: I1121 14:05:56.256121 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Nov 21 14:05:56 crc kubenswrapper[4774]: I1121 14:05:56.674161 4774 request.go:700] Waited for 1.997668746s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/serviceaccounts/cluster-image-registry-operator/token
Nov 21 14:05:56 crc kubenswrapper[4774]: I1121 14:05:56.704597 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grj4w\" (UniqueName: \"kubernetes.io/projected/cf55b393-ef41-4c5f-94d8-f3e829eca612-kube-api-access-grj4w\") pod \"cluster-image-registry-operator-dc59b4c8b-mkldg\" (UID: \"cf55b393-ef41-4c5f-94d8-f3e829eca612\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg"
Nov 21 14:05:56 crc kubenswrapper[4774]: I1121 14:05:56.937161 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Nov 21 14:05:56 crc kubenswrapper[4774]: I1121 14:05:56.976887 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Nov 21 14:05:56 crc kubenswrapper[4774]: I1121 14:05:56.995973 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.016162 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.036446 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.056957 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.076808 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.346227 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/516ab72d-be26-41a3-8f34-2fce0bf4febb-serving-cert\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.346589 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0aad507-e343-416e-b043-2a9af1baa0c7-config\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.346806 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-images\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.347053 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c0aad507-e343-416e-b043-2a9af1baa0c7-auth-proxy-config\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.347232 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c0aad507-e343-416e-b043-2a9af1baa0c7-machine-approver-tls\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.347383 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.347562 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-config\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.347699 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-config\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l"
\"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-client-ca\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.347982 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6ded279c-1738-42b5-8828-e4883c3756bf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.349426 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-images\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.349902 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c0aad507-e343-416e-b043-2a9af1baa0c7-auth-proxy-config\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.350545 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-client-ca\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.353188 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.353403 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.353725 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.355027 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.355097 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.355290 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.355318 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.355392 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.355456 4774 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-api"/"kube-rbac-proxy" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.355620 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.355620 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.355712 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.355888 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.355977 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.356063 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.357232 4774 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.371419 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6ded279c-1738-42b5-8828-e4883c3756bf-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.373084 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0aad507-e343-416e-b043-2a9af1baa0c7-config\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.373473 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.373728 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.373852 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.374456 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c0aad507-e343-416e-b043-2a9af1baa0c7-machine-approver-tls\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.374608 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.374741 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 
14:05:57.375990 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ded279c-1738-42b5-8828-e4883c3756bf-config\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.382163 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.386506 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-config\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.386798 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.387108 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.387160 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/516ab72d-be26-41a3-8f34-2fce0bf4febb-serving-cert\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.387231 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.397007 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.397394 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.397840 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.397942 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.397861 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.398545 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfqpj\" (UniqueName: \"kubernetes.io/projected/a6c40568-f5bb-48c3-bc00-a5b78c663270-kube-api-access-gfqpj\") pod \"dns-operator-744455d44c-hb96t\" (UID: \"a6c40568-f5bb-48c3-bc00-a5b78c663270\") " pod="openshift-dns-operator/dns-operator-744455d44c-hb96t" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 
14:05:57.398557 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.398832 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.399440 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r87jw\" (UniqueName: \"kubernetes.io/projected/6ded279c-1738-42b5-8828-e4883c3756bf-kube-api-access-r87jw\") pod \"machine-api-operator-5694c8668f-8h89j\" (UID: \"6ded279c-1738-42b5-8828-e4883c3756bf\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.401106 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvcpd\" (UniqueName: \"kubernetes.io/projected/565e5abf-8d99-4427-a923-0270e2080164-kube-api-access-jvcpd\") pod \"openshift-config-operator-7777fb866f-zsxng\" (UID: \"565e5abf-8d99-4427-a923-0270e2080164\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.401464 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbdmq\" (UniqueName: \"kubernetes.io/projected/a518d994-29bd-43a1-9dcb-870dd7d0ecdf-kube-api-access-sbdmq\") pod \"ingress-operator-5b745b69d9-9mrfc\" (UID: \"a518d994-29bd-43a1-9dcb-870dd7d0ecdf\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.402089 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4lb7\" (UniqueName: \"kubernetes.io/projected/68d64bbb-e62d-4d42-b89c-3e84d14a0d27-kube-api-access-c4lb7\") pod \"cluster-samples-operator-665b6dd947-mx8jr\" (UID: \"68d64bbb-e62d-4d42-b89c-3e84d14a0d27\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.402805 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tmwg\" (UniqueName: \"kubernetes.io/projected/99ffd227-1170-4aa8-8232-519d2b605f26-kube-api-access-9tmwg\") pod \"kube-storage-version-migrator-operator-b67b599dd-gmzc4\" (UID: \"99ffd227-1170-4aa8-8232-519d2b605f26\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.403121 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-275x6\" (UniqueName: \"kubernetes.io/projected/4252b250-3577-4347-a8f6-0da47a0da0b7-kube-api-access-275x6\") pod \"openshift-controller-manager-operator-756b6f6bc6-skbsp\" (UID: \"4252b250-3577-4347-a8f6-0da47a0da0b7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.403833 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js5pg\" (UniqueName: \"kubernetes.io/projected/f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2-kube-api-access-js5pg\") pod \"apiserver-7bbb656c7d-44rg5\" (UID: \"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.404696 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ffd390c3-23b8-4d65-a346-47b2ccb6b917-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9dv6b\" (UID: \"ffd390c3-23b8-4d65-a346-47b2ccb6b917\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.404859 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cf55b393-ef41-4c5f-94d8-f3e829eca612-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-mkldg\" (UID: \"cf55b393-ef41-4c5f-94d8-f3e829eca612\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.408543 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgt6h\" (UniqueName: \"kubernetes.io/projected/8e451f31-029f-4072-847d-4ac2d4452ece-kube-api-access-fgt6h\") pod \"openshift-apiserver-operator-796bbdcf4f-89bbs\" (UID: \"8e451f31-029f-4072-847d-4ac2d4452ece\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.409068 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7npg2\" (UniqueName: \"kubernetes.io/projected/99f783ba-3348-491e-849d-51149e55f7cc-kube-api-access-7npg2\") pod \"migrator-59844c95c7-6txj8\" (UID: \"99f783ba-3348-491e-849d-51149e55f7cc\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6txj8" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.409125 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf2hp\" (UniqueName: \"kubernetes.io/projected/b94e7447-7c8a-4f4e-9507-689f1500605c-kube-api-access-bf2hp\") pod \"console-f9d7485db-w7tjv\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.409317 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph6pt\" (UniqueName: \"kubernetes.io/projected/a6c4e0bf-1d46-41a8-9b64-fdcb10025225-kube-api-access-ph6pt\") pod \"apiserver-76f77b778f-wgq25\" (UID: \"a6c4e0bf-1d46-41a8-9b64-fdcb10025225\") " pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.411078 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kg9b\" (UniqueName: \"kubernetes.io/projected/86527758-2544-4192-8e14-64e1194a024e-kube-api-access-7kg9b\") pod \"etcd-operator-b45778765-nmdw5\" (UID: \"86527758-2544-4192-8e14-64e1194a024e\") " pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.411476 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qknw\" (UniqueName: \"kubernetes.io/projected/1c533952-b089-4c49-b4dc-a969c08022b9-kube-api-access-4qknw\") pod \"downloads-7954f5f757-4rc7m\" (UID: \"1c533952-b089-4c49-b4dc-a969c08022b9\") " pod="openshift-console/downloads-7954f5f757-4rc7m" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.412414 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9lx6\" (UniqueName: \"kubernetes.io/projected/d40ed0ef-54d5-4a6e-abdf-117f35add216-kube-api-access-v9lx6\") pod \"console-operator-58897d9998-s665q\" (UID: 
\"d40ed0ef-54d5-4a6e-abdf-117f35add216\") " pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.412509 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9m2p\" (UniqueName: \"kubernetes.io/projected/c0aad507-e343-416e-b043-2a9af1baa0c7-kube-api-access-f9m2p\") pod \"machine-approver-56656f9798-pbvh7\" (UID: \"c0aad507-e343-416e-b043-2a9af1baa0c7\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.413278 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghdt4\" (UniqueName: \"kubernetes.io/projected/223a69f9-6da6-49f6-8dc6-791fdb76a205-kube-api-access-ghdt4\") pod \"route-controller-manager-6576b87f9c-gtfqp\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.417030 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbzhp\" (UniqueName: \"kubernetes.io/projected/516ab72d-be26-41a3-8f34-2fce0bf4febb-kube-api-access-zbzhp\") pod \"controller-manager-879f6c89f-d965l\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.420555 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a518d994-29bd-43a1-9dcb-870dd7d0ecdf-bound-sa-token\") pod \"ingress-operator-5b745b69d9-9mrfc\" (UID: \"a518d994-29bd-43a1-9dcb-870dd7d0ecdf\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.430963 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.445063 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-wgq25" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.449657 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/efe34db3-8e05-439e-b576-50c0ee864dda-service-ca-bundle\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.449696 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f1760383-3b9d-4c38-b474-75ec72a82819-installation-pull-secrets\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.449719 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-metrics-certs\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.449749 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjd2g\" (UniqueName: \"kubernetes.io/projected/efe34db3-8e05-439e-b576-50c0ee864dda-kube-api-access-rjd2g\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.449900 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-default-certificate\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.450186 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb-config\") pod \"kube-apiserver-operator-766d6c64bb-kj4js\" (UID: \"fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.450325 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f1760383-3b9d-4c38-b474-75ec72a82819-ca-trust-extracted\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.450436 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-kj4js\" (UID: \"fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.450542 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rx59\" (UniqueName: \"kubernetes.io/projected/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-kube-api-access-5rx59\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.450651 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f1760383-3b9d-4c38-b474-75ec72a82819-trusted-ca\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.450765 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm5dv\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-kube-api-access-pm5dv\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.451127 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.451263 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efe34db3-8e05-439e-b576-50c0ee864dda-config\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.451384 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-kj4js\" (UID: \"fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js" Nov 21 14:05:57 crc kubenswrapper[4774]: E1121 14:05:57.451472 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:05:57.951459194 +0000 UTC m=+148.603658453 (durationBeforeRetry 500ms). 
Nov 21 14:05:57 crc kubenswrapper[4774]: E1121 14:05:57.451472 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:05:57.951459194 +0000 UTC m=+148.603658453 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.451500 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/efe34db3-8e05-439e-b576-50c0ee864dda-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.451522 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-service-ca-bundle\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.451541 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efe34db3-8e05-439e-b576-50c0ee864dda-serving-cert\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.451560 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f1760383-3b9d-4c38-b474-75ec72a82819-registry-certificates\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.451584 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-registry-tls\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.451615 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-bound-sa-token\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.451630 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-stats-auth\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.459616 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-s665q"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.494398 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.496003 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.507692 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.515876 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-hb96t"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.523642 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-4rc7m"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.534480 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.552855 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:05:57 crc kubenswrapper[4774]: E1121 14:05:57.553032 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.053005156 +0000 UTC m=+148.705204415 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.553345 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/577947c3-d018-41ae-9c69-5443707c1073-profile-collector-cert\") pod \"catalog-operator-68c6474976-vbl6m\" (UID: \"577947c3-d018-41ae-9c69-5443707c1073\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.553419 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1cff40e3-a3a2-4cfb-9cf1-a30cbae71000-webhook-cert\") pod \"packageserver-d55dfcdfc-jpftc\" (UID: \"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.553444 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/577947c3-d018-41ae-9c69-5443707c1073-srv-cert\") pod \"catalog-operator-68c6474976-vbl6m\" (UID: \"577947c3-d018-41ae-9c69-5443707c1073\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.553794 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-audit-policies\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.553854 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/735fdae4-98f5-4826-9d0f-92e784b93645-auth-proxy-config\") pod \"machine-config-operator-74547568cd-gbv24\" (UID: \"735fdae4-98f5-4826-9d0f-92e784b93645\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.553883 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-bound-sa-token\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.553958 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2279k\" (UniqueName: \"kubernetes.io/projected/aa95d68b-3894-42cf-9af0-18b2575250c4-kube-api-access-2279k\") pod \"collect-profiles-29395560-7hdr9\" (UID: \"aa95d68b-3894-42cf-9af0-18b2575250c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.554031 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/efe34db3-8e05-439e-b576-50c0ee864dda-service-ca-bundle\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.554059 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2294f\" (UniqueName: \"kubernetes.io/projected/577947c3-d018-41ae-9c69-5443707c1073-kube-api-access-2294f\") pod \"catalog-operator-68c6474976-vbl6m\" (UID: \"577947c3-d018-41ae-9c69-5443707c1073\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.554186 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.554454 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-csi-data-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.554487 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-socket-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.554511 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/15b8faa6-1708-463a-9371-033ee86fd845-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-79zhf\" (UID: \"15b8faa6-1708-463a-9371-033ee86fd845\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-79zhf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.554540 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/827301f1-4fae-491e-bcf5-7c9319bbe1aa-metrics-tls\") pod \"dns-default-f9llk\" (UID: \"827301f1-4fae-491e-bcf5-7c9319bbe1aa\") " pod="openshift-dns/dns-default-f9llk"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.554563 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/735fdae4-98f5-4826-9d0f-92e784b93645-images\") pod \"machine-config-operator-74547568cd-gbv24\" (UID: \"735fdae4-98f5-4826-9d0f-92e784b93645\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24"
\"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/efe34db3-8e05-439e-b576-50c0ee864dda-service-ca-bundle\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.554587 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f262d7bf-02db-4d70-8c81-af3e592ae0d4-signing-cabundle\") pod \"service-ca-9c57cc56f-gdvrm\" (UID: \"f262d7bf-02db-4d70-8c81-af3e592ae0d4\") " pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.554677 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-metrics-certs\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.555081 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.556111 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f1760383-3b9d-4c38-b474-75ec72a82819-ca-trust-extracted\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.556925 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f1760383-3b9d-4c38-b474-75ec72a82819-ca-trust-extracted\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.557314 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c9c60662-2ab1-49a1-8fda-0103d3f8cc78-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-ps5m6\" (UID: \"c9c60662-2ab1-49a1-8fda-0103d3f8cc78\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.557393 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.557415 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" 
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.557432 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhlgz\" (UniqueName: \"kubernetes.io/projected/c9c60662-2ab1-49a1-8fda-0103d3f8cc78-kube-api-access-mhlgz\") pod \"package-server-manager-789f6589d5-ps5m6\" (UID: \"c9c60662-2ab1-49a1-8fda-0103d3f8cc78\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.557459 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rx59\" (UniqueName: \"kubernetes.io/projected/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-kube-api-access-5rx59\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.557480 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1b4aca26-5092-42ac-ac83-9bee708dbfcf-proxy-tls\") pod \"machine-config-controller-84d6567774-7psfz\" (UID: \"1b4aca26-5092-42ac-ac83-9bee708dbfcf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.557498 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa95d68b-3894-42cf-9af0-18b2575250c4-secret-volume\") pod \"collect-profiles-29395560-7hdr9\" (UID: \"aa95d68b-3894-42cf-9af0-18b2575250c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.557519 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4aea8951-3939-4012-966c-b0571f992df4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gsdtl\" (UID: \"4aea8951-3939-4012-966c-b0571f992df4\") " pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.557548 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1cff40e3-a3a2-4cfb-9cf1-a30cbae71000-apiservice-cert\") pod \"packageserver-d55dfcdfc-jpftc\" (UID: \"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.557655 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efe34db3-8e05-439e-b576-50c0ee864dda-config\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.557673 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4aea8951-3939-4012-966c-b0571f992df4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gsdtl\" (UID: \"4aea8951-3939-4012-966c-b0571f992df4\") " pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558025 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4t5r\" (UniqueName: \"kubernetes.io/projected/d32e138f-edc8-459c-8cbf-9d1a07be8e67-kube-api-access-t4t5r\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558072 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f262d7bf-02db-4d70-8c81-af3e592ae0d4-signing-key\") pod \"service-ca-9c57cc56f-gdvrm\" (UID: \"f262d7bf-02db-4d70-8c81-af3e592ae0d4\") " pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558099 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzgwv\" (UniqueName: \"kubernetes.io/projected/15b8faa6-1708-463a-9371-033ee86fd845-kube-api-access-pzgwv\") pod \"multus-admission-controller-857f4d67dd-79zhf\" (UID: \"15b8faa6-1708-463a-9371-033ee86fd845\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-79zhf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558117 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/827301f1-4fae-491e-bcf5-7c9319bbe1aa-config-volume\") pod \"dns-default-f9llk\" (UID: \"827301f1-4fae-491e-bcf5-7c9319bbe1aa\") " pod="openshift-dns/dns-default-f9llk"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558153 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/efe34db3-8e05-439e-b576-50c0ee864dda-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558173 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-mountpoint-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558193 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/adc5270c-e6f1-4f6d-b6db-62395196bc1c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lz8fs\" (UID: \"adc5270c-e6f1-4f6d-b6db-62395196bc1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558237 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/968f0685-b314-4b9f-86d0-0e39704e8083-serving-cert\") pod \"service-ca-operator-777779d784-mcznv\" (UID: \"968f0685-b314-4b9f-86d0-0e39704e8083\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558260 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2thqs\" (UniqueName: \"kubernetes.io/projected/cc511127-ef34-4387-986d-4d1228a730d4-kube-api-access-2thqs\") pod \"olm-operator-6b444d44fb-8kj7t\" (UID: \"cc511127-ef34-4387-986d-4d1228a730d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558420 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558449 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/735fdae4-98f5-4826-9d0f-92e784b93645-proxy-tls\") pod \"machine-config-operator-74547568cd-gbv24\" (UID: \"735fdae4-98f5-4826-9d0f-92e784b93645\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558522 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cc8ceff5-9c76-4521-a560-d9e6424c93f8-audit-dir\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558560 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z2kj\" (UniqueName: \"kubernetes.io/projected/c3717d22-e2b1-427b-8585-9ba3daa3b61c-kube-api-access-7z2kj\") pod \"control-plane-machine-set-operator-78cbb6b69f-c8jn9\" (UID: \"c3717d22-e2b1-427b-8585-9ba3daa3b61c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558593 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxrh7\" (UniqueName: \"kubernetes.io/projected/968f0685-b314-4b9f-86d0-0e39704e8083-kube-api-access-wxrh7\") pod \"service-ca-operator-777779d784-mcznv\" (UID: \"968f0685-b314-4b9f-86d0-0e39704e8083\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558609 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558647 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558668 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/be63cc1d-3001-4622-93c0-ec885e5134d9-node-bootstrap-token\") pod \"machine-config-server-7qsfc\" (UID: \"be63cc1d-3001-4622-93c0-ec885e5134d9\") " pod="openshift-machine-config-operator/machine-config-server-7qsfc"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558688 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/cc511127-ef34-4387-986d-4d1228a730d4-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8kj7t\" (UID: \"cc511127-ef34-4387-986d-4d1228a730d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558731 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/adc5270c-e6f1-4f6d-b6db-62395196bc1c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lz8fs\" (UID: \"adc5270c-e6f1-4f6d-b6db-62395196bc1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558810 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558887 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d-cert\") pod \"ingress-canary-bjlhz\" (UID: \"8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d\") " pod="openshift-ingress-canary/ingress-canary-bjlhz"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558930 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-stats-auth\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558945 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1cff40e3-a3a2-4cfb-9cf1-a30cbae71000-tmpfs\") pod \"packageserver-d55dfcdfc-jpftc\" (UID: \"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.558974 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.559029 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.559045 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/968f0685-b314-4b9f-86d0-0e39704e8083-config\") pod \"service-ca-operator-777779d784-mcznv\" (UID: \"968f0685-b314-4b9f-86d0-0e39704e8083\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.559096 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-plugins-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.559140 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f1760383-3b9d-4c38-b474-75ec72a82819-installation-pull-secrets\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.559180 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c3717d22-e2b1-427b-8585-9ba3daa3b61c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-c8jn9\" (UID: \"c3717d22-e2b1-427b-8585-9ba3daa3b61c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.559210 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa95d68b-3894-42cf-9af0-18b2575250c4-config-volume\") pod \"collect-profiles-29395560-7hdr9\" (UID: \"aa95d68b-3894-42cf-9af0-18b2575250c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.559269 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjd2g\" (UniqueName: \"kubernetes.io/projected/efe34db3-8e05-439e-b576-50c0ee864dda-kube-api-access-rjd2g\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.559288 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-default-certificate\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.559323 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkwvq\" (UniqueName: \"kubernetes.io/projected/827301f1-4fae-491e-bcf5-7c9319bbe1aa-kube-api-access-kkwvq\") pod \"dns-default-f9llk\" (UID: \"827301f1-4fae-491e-bcf5-7c9319bbe1aa\") " pod="openshift-dns/dns-default-f9llk"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.559358 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhk7p\" (UniqueName: \"kubernetes.io/projected/8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d-kube-api-access-fhk7p\") pod \"ingress-canary-bjlhz\" (UID: \"8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d\") " pod="openshift-ingress-canary/ingress-canary-bjlhz"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.559422 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb-config\") pod \"kube-apiserver-operator-766d6c64bb-kj4js\" (UID: \"fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.559445 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-kj4js\" (UID: \"fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560073 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1b4aca26-5092-42ac-ac83-9bee708dbfcf-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-7psfz\" (UID: \"1b4aca26-5092-42ac-ac83-9bee708dbfcf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560114 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq5dz\" (UniqueName: \"kubernetes.io/projected/4aea8951-3939-4012-966c-b0571f992df4-kube-api-access-mq5dz\") pod \"marketplace-operator-79b997595-gsdtl\" (UID: \"4aea8951-3939-4012-966c-b0571f992df4\") " pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560170 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560199 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f1760383-3b9d-4c38-b474-75ec72a82819-trusted-ca\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560216 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz9dr\" (UniqueName: \"kubernetes.io/projected/f262d7bf-02db-4d70-8c81-af3e592ae0d4-kube-api-access-bz9dr\") pod \"service-ca-9c57cc56f-gdvrm\" (UID: \"f262d7bf-02db-4d70-8c81-af3e592ae0d4\") " pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560251 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm5dv\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-kube-api-access-pm5dv\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560280 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560299 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqm5j\" (UniqueName: \"kubernetes.io/projected/cc8ceff5-9c76-4521-a560-d9e6424c93f8-kube-api-access-rqm5j\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560315 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-registration-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560331 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adc5270c-e6f1-4f6d-b6db-62395196bc1c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lz8fs\" (UID: \"adc5270c-e6f1-4f6d-b6db-62395196bc1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560352 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9wcp\" (UniqueName: \"kubernetes.io/projected/735fdae4-98f5-4826-9d0f-92e784b93645-kube-api-access-n9wcp\") pod \"machine-config-operator-74547568cd-gbv24\" (UID: \"735fdae4-98f5-4826-9d0f-92e784b93645\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560374 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560393 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-kj4js\" (UID: \"fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560414 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lclw9\" (UniqueName: \"kubernetes.io/projected/be63cc1d-3001-4622-93c0-ec885e5134d9-kube-api-access-lclw9\") pod \"machine-config-server-7qsfc\" (UID: \"be63cc1d-3001-4622-93c0-ec885e5134d9\") " pod="openshift-machine-config-operator/machine-config-server-7qsfc"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560430 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvhcm\" (UniqueName: \"kubernetes.io/projected/1b4aca26-5092-42ac-ac83-9bee708dbfcf-kube-api-access-bvhcm\") pod \"machine-config-controller-84d6567774-7psfz\" (UID: \"1b4aca26-5092-42ac-ac83-9bee708dbfcf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560444 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cc511127-ef34-4387-986d-4d1228a730d4-srv-cert\") pod \"olm-operator-6b444d44fb-8kj7t\" (UID: \"cc511127-ef34-4387-986d-4d1228a730d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560474 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efe34db3-8e05-439e-b576-50c0ee864dda-serving-cert\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560493 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-service-ca-bundle\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560509 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/be63cc1d-3001-4622-93c0-ec885e5134d9-certs\") pod \"machine-config-server-7qsfc\" (UID: \"be63cc1d-3001-4622-93c0-ec885e5134d9\") " pod="openshift-machine-config-operator/machine-config-server-7qsfc"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560530 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f1760383-3b9d-4c38-b474-75ec72a82819-registry-certificates\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560554 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-registry-tls\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.560718 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddzd2\" (UniqueName: \"kubernetes.io/projected/1cff40e3-a3a2-4cfb-9cf1-a30cbae71000-kube-api-access-ddzd2\") pod \"packageserver-d55dfcdfc-jpftc\" (UID: \"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.564223 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-metrics-certs\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.563747 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb-config\") pod \"kube-apiserver-operator-766d6c64bb-kj4js\" (UID: \"fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.565380 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f1760383-3b9d-4c38-b474-75ec72a82819-registry-certificates\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.565610 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/efe34db3-8e05-439e-b576-50c0ee864dda-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.566206 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efe34db3-8e05-439e-b576-50c0ee864dda-config\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"
Nov 21 14:05:57 crc kubenswrapper[4774]: E1121 14:05:57.566496 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.066467778 +0000 UTC m=+148.718667027 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.566862 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f1760383-3b9d-4c38-b474-75ec72a82819-trusted-ca\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.572602 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efe34db3-8e05-439e-b576-50c0ee864dda-serving-cert\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.572678 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-kj4js\" (UID: \"fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.573156 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f1760383-3b9d-4c38-b474-75ec72a82819-installation-pull-secrets\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.573655 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-default-certificate\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.575721 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-stats-auth\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.576276 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-registry-tls\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.578730 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-service-ca-bundle\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.599650 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-bound-sa-token\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.619609 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rx59\" (UniqueName: \"kubernetes.io/projected/7c1cea49-d382-4985-b4f3-4ec4a0ec52da-kube-api-access-5rx59\") pod \"router-default-5444994796-rmzf9\" (UID: \"7c1cea49-d382-4985-b4f3-4ec4a0ec52da\") " pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.619880 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.625479 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.629432 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.633604 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.643129 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.648455 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.656465 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjd2g\" (UniqueName: \"kubernetes.io/projected/efe34db3-8e05-439e-b576-50c0ee864dda-kube-api-access-rjd2g\") pod \"authentication-operator-69f744f599-vlhqx\" (UID: \"efe34db3-8e05-439e-b576-50c0ee864dda\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.657110 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.662320 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:05:57 crc kubenswrapper[4774]: E1121 14:05:57.662576 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.162531635 +0000 UTC m=+148.814730894 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.662628 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.662677 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-csi-data-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.662706 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/15b8faa6-1708-463a-9371-033ee86fd845-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-79zhf\" (UID: \"15b8faa6-1708-463a-9371-033ee86fd845\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-79zhf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.662733 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/827301f1-4fae-491e-bcf5-7c9319bbe1aa-metrics-tls\") pod \"dns-default-f9llk\" (UID: \"827301f1-4fae-491e-bcf5-7c9319bbe1aa\") " pod="openshift-dns/dns-default-f9llk"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.662759 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/735fdae4-98f5-4826-9d0f-92e784b93645-images\") pod \"machine-config-operator-74547568cd-gbv24\" (UID: \"735fdae4-98f5-4826-9d0f-92e784b93645\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24"
\"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-socket-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.662976 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f262d7bf-02db-4d70-8c81-af3e592ae0d4-signing-cabundle\") pod \"service-ca-9c57cc56f-gdvrm\" (UID: \"f262d7bf-02db-4d70-8c81-af3e592ae0d4\") " pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663024 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c9c60662-2ab1-49a1-8fda-0103d3f8cc78-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-ps5m6\" (UID: \"c9c60662-2ab1-49a1-8fda-0103d3f8cc78\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663052 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663086 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663130 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhlgz\" (UniqueName: \"kubernetes.io/projected/c9c60662-2ab1-49a1-8fda-0103d3f8cc78-kube-api-access-mhlgz\") pod \"package-server-manager-789f6589d5-ps5m6\" (UID: \"c9c60662-2ab1-49a1-8fda-0103d3f8cc78\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663157 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4aea8951-3939-4012-966c-b0571f992df4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gsdtl\" (UID: \"4aea8951-3939-4012-966c-b0571f992df4\") " pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663181 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1b4aca26-5092-42ac-ac83-9bee708dbfcf-proxy-tls\") pod \"machine-config-controller-84d6567774-7psfz\" (UID: \"1b4aca26-5092-42ac-ac83-9bee708dbfcf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663205 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa95d68b-3894-42cf-9af0-18b2575250c4-secret-volume\") pod 
\"collect-profiles-29395560-7hdr9\" (UID: \"aa95d68b-3894-42cf-9af0-18b2575250c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663233 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1cff40e3-a3a2-4cfb-9cf1-a30cbae71000-apiservice-cert\") pod \"packageserver-d55dfcdfc-jpftc\" (UID: \"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663265 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4t5r\" (UniqueName: \"kubernetes.io/projected/d32e138f-edc8-459c-8cbf-9d1a07be8e67-kube-api-access-t4t5r\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663306 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4aea8951-3939-4012-966c-b0571f992df4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gsdtl\" (UID: \"4aea8951-3939-4012-966c-b0571f992df4\") " pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663347 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f262d7bf-02db-4d70-8c81-af3e592ae0d4-signing-key\") pod \"service-ca-9c57cc56f-gdvrm\" (UID: \"f262d7bf-02db-4d70-8c81-af3e592ae0d4\") " pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663377 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzgwv\" (UniqueName: \"kubernetes.io/projected/15b8faa6-1708-463a-9371-033ee86fd845-kube-api-access-pzgwv\") pod \"multus-admission-controller-857f4d67dd-79zhf\" (UID: \"15b8faa6-1708-463a-9371-033ee86fd845\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-79zhf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663400 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/827301f1-4fae-491e-bcf5-7c9319bbe1aa-config-volume\") pod \"dns-default-f9llk\" (UID: \"827301f1-4fae-491e-bcf5-7c9319bbe1aa\") " pod="openshift-dns/dns-default-f9llk" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663428 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-mountpoint-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663450 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/adc5270c-e6f1-4f6d-b6db-62395196bc1c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lz8fs\" (UID: \"adc5270c-e6f1-4f6d-b6db-62395196bc1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663481 4774 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663506 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/968f0685-b314-4b9f-86d0-0e39704e8083-serving-cert\") pod \"service-ca-operator-777779d784-mcznv\" (UID: \"968f0685-b314-4b9f-86d0-0e39704e8083\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663529 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2thqs\" (UniqueName: \"kubernetes.io/projected/cc511127-ef34-4387-986d-4d1228a730d4-kube-api-access-2thqs\") pod \"olm-operator-6b444d44fb-8kj7t\" (UID: \"cc511127-ef34-4387-986d-4d1228a730d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663570 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/735fdae4-98f5-4826-9d0f-92e784b93645-proxy-tls\") pod \"machine-config-operator-74547568cd-gbv24\" (UID: \"735fdae4-98f5-4826-9d0f-92e784b93645\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663594 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z2kj\" (UniqueName: \"kubernetes.io/projected/c3717d22-e2b1-427b-8585-9ba3daa3b61c-kube-api-access-7z2kj\") pod \"control-plane-machine-set-operator-78cbb6b69f-c8jn9\" (UID: \"c3717d22-e2b1-427b-8585-9ba3daa3b61c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663622 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cc8ceff5-9c76-4521-a560-d9e6424c93f8-audit-dir\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663647 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxrh7\" (UniqueName: \"kubernetes.io/projected/968f0685-b314-4b9f-86d0-0e39704e8083-kube-api-access-wxrh7\") pod \"service-ca-operator-777779d784-mcznv\" (UID: \"968f0685-b314-4b9f-86d0-0e39704e8083\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663669 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/adc5270c-e6f1-4f6d-b6db-62395196bc1c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lz8fs\" (UID: \"adc5270c-e6f1-4f6d-b6db-62395196bc1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663692 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663714 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663737 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/be63cc1d-3001-4622-93c0-ec885e5134d9-node-bootstrap-token\") pod \"machine-config-server-7qsfc\" (UID: \"be63cc1d-3001-4622-93c0-ec885e5134d9\") " pod="openshift-machine-config-operator/machine-config-server-7qsfc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663760 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/cc511127-ef34-4387-986d-4d1228a730d4-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8kj7t\" (UID: \"cc511127-ef34-4387-986d-4d1228a730d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663787 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663834 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d-cert\") pod \"ingress-canary-bjlhz\" (UID: \"8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d\") " pod="openshift-ingress-canary/ingress-canary-bjlhz" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663859 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1cff40e3-a3a2-4cfb-9cf1-a30cbae71000-tmpfs\") pod \"packageserver-d55dfcdfc-jpftc\" (UID: \"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663880 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663909 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-error\") pod 
\"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663914 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f262d7bf-02db-4d70-8c81-af3e592ae0d4-signing-cabundle\") pod \"service-ca-9c57cc56f-gdvrm\" (UID: \"f262d7bf-02db-4d70-8c81-af3e592ae0d4\") " pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663933 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/968f0685-b314-4b9f-86d0-0e39704e8083-config\") pod \"service-ca-operator-777779d784-mcznv\" (UID: \"968f0685-b314-4b9f-86d0-0e39704e8083\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663955 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-plugins-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.663991 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa95d68b-3894-42cf-9af0-18b2575250c4-config-volume\") pod \"collect-profiles-29395560-7hdr9\" (UID: \"aa95d68b-3894-42cf-9af0-18b2575250c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664019 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c3717d22-e2b1-427b-8585-9ba3daa3b61c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-c8jn9\" (UID: \"c3717d22-e2b1-427b-8585-9ba3daa3b61c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664090 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkwvq\" (UniqueName: \"kubernetes.io/projected/827301f1-4fae-491e-bcf5-7c9319bbe1aa-kube-api-access-kkwvq\") pod \"dns-default-f9llk\" (UID: \"827301f1-4fae-491e-bcf5-7c9319bbe1aa\") " pod="openshift-dns/dns-default-f9llk" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664116 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhk7p\" (UniqueName: \"kubernetes.io/projected/8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d-kube-api-access-fhk7p\") pod \"ingress-canary-bjlhz\" (UID: \"8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d\") " pod="openshift-ingress-canary/ingress-canary-bjlhz" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664146 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1b4aca26-5092-42ac-ac83-9bee708dbfcf-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-7psfz\" (UID: \"1b4aca26-5092-42ac-ac83-9bee708dbfcf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664173 
4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq5dz\" (UniqueName: \"kubernetes.io/projected/4aea8951-3939-4012-966c-b0571f992df4-kube-api-access-mq5dz\") pod \"marketplace-operator-79b997595-gsdtl\" (UID: \"4aea8951-3939-4012-966c-b0571f992df4\") " pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664198 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664229 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz9dr\" (UniqueName: \"kubernetes.io/projected/f262d7bf-02db-4d70-8c81-af3e592ae0d4-kube-api-access-bz9dr\") pod \"service-ca-9c57cc56f-gdvrm\" (UID: \"f262d7bf-02db-4d70-8c81-af3e592ae0d4\") " pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664278 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adc5270c-e6f1-4f6d-b6db-62395196bc1c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lz8fs\" (UID: \"adc5270c-e6f1-4f6d-b6db-62395196bc1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664312 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664338 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqm5j\" (UniqueName: \"kubernetes.io/projected/cc8ceff5-9c76-4521-a560-d9e6424c93f8-kube-api-access-rqm5j\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664361 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-registration-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664387 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9wcp\" (UniqueName: \"kubernetes.io/projected/735fdae4-98f5-4826-9d0f-92e784b93645-kube-api-access-n9wcp\") pod \"machine-config-operator-74547568cd-gbv24\" (UID: \"735fdae4-98f5-4826-9d0f-92e784b93645\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664417 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664440 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lclw9\" (UniqueName: \"kubernetes.io/projected/be63cc1d-3001-4622-93c0-ec885e5134d9-kube-api-access-lclw9\") pod \"machine-config-server-7qsfc\" (UID: \"be63cc1d-3001-4622-93c0-ec885e5134d9\") " pod="openshift-machine-config-operator/machine-config-server-7qsfc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664464 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvhcm\" (UniqueName: \"kubernetes.io/projected/1b4aca26-5092-42ac-ac83-9bee708dbfcf-kube-api-access-bvhcm\") pod \"machine-config-controller-84d6567774-7psfz\" (UID: \"1b4aca26-5092-42ac-ac83-9bee708dbfcf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664486 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cc511127-ef34-4387-986d-4d1228a730d4-srv-cert\") pod \"olm-operator-6b444d44fb-8kj7t\" (UID: \"cc511127-ef34-4387-986d-4d1228a730d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664506 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/be63cc1d-3001-4622-93c0-ec885e5134d9-certs\") pod \"machine-config-server-7qsfc\" (UID: \"be63cc1d-3001-4622-93c0-ec885e5134d9\") " pod="openshift-machine-config-operator/machine-config-server-7qsfc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664540 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddzd2\" (UniqueName: \"kubernetes.io/projected/1cff40e3-a3a2-4cfb-9cf1-a30cbae71000-kube-api-access-ddzd2\") pod \"packageserver-d55dfcdfc-jpftc\" (UID: \"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664567 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/577947c3-d018-41ae-9c69-5443707c1073-profile-collector-cert\") pod \"catalog-operator-68c6474976-vbl6m\" (UID: \"577947c3-d018-41ae-9c69-5443707c1073\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664601 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1cff40e3-a3a2-4cfb-9cf1-a30cbae71000-webhook-cert\") pod \"packageserver-d55dfcdfc-jpftc\" (UID: \"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664623 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/577947c3-d018-41ae-9c69-5443707c1073-srv-cert\") pod \"catalog-operator-68c6474976-vbl6m\" (UID: 
\"577947c3-d018-41ae-9c69-5443707c1073\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664645 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-audit-policies\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664663 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/735fdae4-98f5-4826-9d0f-92e784b93645-auth-proxy-config\") pod \"machine-config-operator-74547568cd-gbv24\" (UID: \"735fdae4-98f5-4826-9d0f-92e784b93645\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664690 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2279k\" (UniqueName: \"kubernetes.io/projected/aa95d68b-3894-42cf-9af0-18b2575250c4-kube-api-access-2279k\") pod \"collect-profiles-29395560-7hdr9\" (UID: \"aa95d68b-3894-42cf-9af0-18b2575250c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.664713 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2294f\" (UniqueName: \"kubernetes.io/projected/577947c3-d018-41ae-9c69-5443707c1073-kube-api-access-2294f\") pod \"catalog-operator-68c6474976-vbl6m\" (UID: \"577947c3-d018-41ae-9c69-5443707c1073\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.666104 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/735fdae4-98f5-4826-9d0f-92e784b93645-images\") pod \"machine-config-operator-74547568cd-gbv24\" (UID: \"735fdae4-98f5-4826-9d0f-92e784b93645\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.666477 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/adc5270c-e6f1-4f6d-b6db-62395196bc1c-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lz8fs\" (UID: \"adc5270c-e6f1-4f6d-b6db-62395196bc1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.666603 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/827301f1-4fae-491e-bcf5-7c9319bbe1aa-config-volume\") pod \"dns-default-f9llk\" (UID: \"827301f1-4fae-491e-bcf5-7c9319bbe1aa\") " pod="openshift-dns/dns-default-f9llk" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.667569 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4aea8951-3939-4012-966c-b0571f992df4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-gsdtl\" (UID: \"4aea8951-3939-4012-966c-b0571f992df4\") " pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.668222 4774 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cc8ceff5-9c76-4521-a560-d9e6424c93f8-audit-dir\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: E1121 14:05:57.671098 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.171081058 +0000 UTC m=+148.823280437 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.671798 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-socket-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.673284 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/cc511127-ef34-4387-986d-4d1228a730d4-profile-collector-cert\") pod \"olm-operator-6b444d44fb-8kj7t\" (UID: \"cc511127-ef34-4387-986d-4d1228a730d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.673748 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa95d68b-3894-42cf-9af0-18b2575250c4-secret-volume\") pod \"collect-profiles-29395560-7hdr9\" (UID: \"aa95d68b-3894-42cf-9af0-18b2575250c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.674207 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.674569 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.674780 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.675288 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/be63cc1d-3001-4622-93c0-ec885e5134d9-node-bootstrap-token\") pod \"machine-config-server-7qsfc\" (UID: \"be63cc1d-3001-4622-93c0-ec885e5134d9\") " pod="openshift-machine-config-operator/machine-config-server-7qsfc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.675654 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.675658 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-csi-data-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.676581 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-registration-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.677156 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1cff40e3-a3a2-4cfb-9cf1-a30cbae71000-apiservice-cert\") pod \"packageserver-d55dfcdfc-jpftc\" (UID: \"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.677288 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f262d7bf-02db-4d70-8c81-af3e592ae0d4-signing-key\") pod \"service-ca-9c57cc56f-gdvrm\" (UID: \"f262d7bf-02db-4d70-8c81-af3e592ae0d4\") " pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.677478 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-mountpoint-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.677792 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/d32e138f-edc8-459c-8cbf-9d1a07be8e67-plugins-dir\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.678536 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa95d68b-3894-42cf-9af0-18b2575250c4-config-volume\") pod \"collect-profiles-29395560-7hdr9\" (UID: \"aa95d68b-3894-42cf-9af0-18b2575250c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.679501 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1b4aca26-5092-42ac-ac83-9bee708dbfcf-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-7psfz\" (UID: \"1b4aca26-5092-42ac-ac83-9bee708dbfcf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.679936 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.680115 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/adc5270c-e6f1-4f6d-b6db-62395196bc1c-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lz8fs\" (UID: \"adc5270c-e6f1-4f6d-b6db-62395196bc1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.679941 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/968f0685-b314-4b9f-86d0-0e39704e8083-config\") pod \"service-ca-operator-777779d784-mcznv\" (UID: \"968f0685-b314-4b9f-86d0-0e39704e8083\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.680421 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/827301f1-4fae-491e-bcf5-7c9319bbe1aa-metrics-tls\") pod \"dns-default-f9llk\" (UID: \"827301f1-4fae-491e-bcf5-7c9319bbe1aa\") " pod="openshift-dns/dns-default-f9llk" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.680949 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6txj8" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.681127 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.682299 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/1cff40e3-a3a2-4cfb-9cf1-a30cbae71000-tmpfs\") pod \"packageserver-d55dfcdfc-jpftc\" (UID: \"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.683131 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-audit-policies\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.686133 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c9c60662-2ab1-49a1-8fda-0103d3f8cc78-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-ps5m6\" (UID: \"c9c60662-2ab1-49a1-8fda-0103d3f8cc78\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.686484 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.689231 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/735fdae4-98f5-4826-9d0f-92e784b93645-auth-proxy-config\") pod \"machine-config-operator-74547568cd-gbv24\" (UID: \"735fdae4-98f5-4826-9d0f-92e784b93645\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.691991 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.692430 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-kj4js\" (UID: \"fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js" Nov 21 14:05:57 
crc kubenswrapper[4774]: I1121 14:05:57.692477 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1b4aca26-5092-42ac-ac83-9bee708dbfcf-proxy-tls\") pod \"machine-config-controller-84d6567774-7psfz\" (UID: \"1b4aca26-5092-42ac-ac83-9bee708dbfcf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.694357 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/735fdae4-98f5-4826-9d0f-92e784b93645-proxy-tls\") pod \"machine-config-operator-74547568cd-gbv24\" (UID: \"735fdae4-98f5-4826-9d0f-92e784b93645\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.695282 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cc511127-ef34-4387-986d-4d1228a730d4-srv-cert\") pod \"olm-operator-6b444d44fb-8kj7t\" (UID: \"cc511127-ef34-4387-986d-4d1228a730d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.695512 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4aea8951-3939-4012-966c-b0571f992df4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-gsdtl\" (UID: \"4aea8951-3939-4012-966c-b0571f992df4\") " pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.695584 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/be63cc1d-3001-4622-93c0-ec885e5134d9-certs\") pod \"machine-config-server-7qsfc\" (UID: \"be63cc1d-3001-4622-93c0-ec885e5134d9\") " pod="openshift-machine-config-operator/machine-config-server-7qsfc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.695607 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/577947c3-d018-41ae-9c69-5443707c1073-profile-collector-cert\") pod \"catalog-operator-68c6474976-vbl6m\" (UID: \"577947c3-d018-41ae-9c69-5443707c1073\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.696113 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d-cert\") pod \"ingress-canary-bjlhz\" (UID: \"8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d\") " pod="openshift-ingress-canary/ingress-canary-bjlhz" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.696337 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/968f0685-b314-4b9f-86d0-0e39704e8083-serving-cert\") pod \"service-ca-operator-777779d784-mcznv\" (UID: \"968f0685-b314-4b9f-86d0-0e39704e8083\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.696440 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/c3717d22-e2b1-427b-8585-9ba3daa3b61c-control-plane-machine-set-operator-tls\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-c8jn9\" (UID: \"c3717d22-e2b1-427b-8585-9ba3daa3b61c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.696519 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1cff40e3-a3a2-4cfb-9cf1-a30cbae71000-webhook-cert\") pod \"packageserver-d55dfcdfc-jpftc\" (UID: \"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.696769 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.702656 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.704432 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/577947c3-d018-41ae-9c69-5443707c1073-srv-cert\") pod \"catalog-operator-68c6474976-vbl6m\" (UID: \"577947c3-d018-41ae-9c69-5443707c1073\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.708202 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm5dv\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-kube-api-access-pm5dv\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.708993 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/15b8faa6-1708-463a-9371-033ee86fd845-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-79zhf\" (UID: \"15b8faa6-1708-463a-9371-033ee86fd845\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-79zhf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.714768 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.720172 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.720324 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.720793 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.739121 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2294f\" (UniqueName: \"kubernetes.io/projected/577947c3-d018-41ae-9c69-5443707c1073-kube-api-access-2294f\") pod \"catalog-operator-68c6474976-vbl6m\" (UID: \"577947c3-d018-41ae-9c69-5443707c1073\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" Nov 21 14:05:57 crc kubenswrapper[4774]: W1121 14:05:57.749729 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0aad507_e343_416e_b043_2a9af1baa0c7.slice/crio-5f2e3c40e144fa4d1df1a3dc89e5927807fd6fea67f671358c2d0eea445b3e0b WatchSource:0}: Error finding container 5f2e3c40e144fa4d1df1a3dc89e5927807fd6fea67f671358c2d0eea445b3e0b: Status 404 returned error can't find the container with id 5f2e3c40e144fa4d1df1a3dc89e5927807fd6fea67f671358c2d0eea445b3e0b Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.753606 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4t5r\" (UniqueName: \"kubernetes.io/projected/d32e138f-edc8-459c-8cbf-9d1a07be8e67-kube-api-access-t4t5r\") pod \"csi-hostpathplugin-zpzzf\" (UID: \"d32e138f-edc8-459c-8cbf-9d1a07be8e67\") " pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.768182 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:05:57 crc kubenswrapper[4774]: E1121 14:05:57.768689 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.268661808 +0000 UTC m=+148.920861067 (durationBeforeRetry 500ms). 
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.801627 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z2kj\" (UniqueName: \"kubernetes.io/projected/c3717d22-e2b1-427b-8585-9ba3daa3b61c-kube-api-access-7z2kj\") pod \"control-plane-machine-set-operator-78cbb6b69f-c8jn9\" (UID: \"c3717d22-e2b1-427b-8585-9ba3daa3b61c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.809487 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxrh7\" (UniqueName: \"kubernetes.io/projected/968f0685-b314-4b9f-86d0-0e39704e8083-kube-api-access-wxrh7\") pod \"service-ca-operator-777779d784-mcznv\" (UID: \"968f0685-b314-4b9f-86d0-0e39704e8083\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.817685 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.818956 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzgwv\" (UniqueName: \"kubernetes.io/projected/15b8faa6-1708-463a-9371-033ee86fd845-kube-api-access-pzgwv\") pod \"multus-admission-controller-857f4d67dd-79zhf\" (UID: \"15b8faa6-1708-463a-9371-033ee86fd845\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-79zhf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.839419 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-79zhf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.859066 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhlgz\" (UniqueName: \"kubernetes.io/projected/c9c60662-2ab1-49a1-8fda-0103d3f8cc78-kube-api-access-mhlgz\") pod \"package-server-manager-789f6589d5-ps5m6\" (UID: \"c9c60662-2ab1-49a1-8fda-0103d3f8cc78\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.866971 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2thqs\" (UniqueName: \"kubernetes.io/projected/cc511127-ef34-4387-986d-4d1228a730d4-kube-api-access-2thqs\") pod \"olm-operator-6b444d44fb-8kj7t\" (UID: \"cc511127-ef34-4387-986d-4d1228a730d4\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.869990 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: E1121 14:05:57.870350 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.370333644 +0000 UTC m=+149.022532913 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.892916 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqm5j\" (UniqueName: \"kubernetes.io/projected/cc8ceff5-9c76-4521-a560-d9e6424c93f8-kube-api-access-rqm5j\") pod \"oauth-openshift-558db77b4-gfhfj\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.896615 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9wcp\" (UniqueName: \"kubernetes.io/projected/735fdae4-98f5-4826-9d0f-92e784b93645-kube-api-access-n9wcp\") pod \"machine-config-operator-74547568cd-gbv24\" (UID: \"735fdae4-98f5-4826-9d0f-92e784b93645\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.897852 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-zpzzf"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.917593 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkwvq\" (UniqueName: \"kubernetes.io/projected/827301f1-4fae-491e-bcf5-7c9319bbe1aa-kube-api-access-kkwvq\") pod \"dns-default-f9llk\" (UID: \"827301f1-4fae-491e-bcf5-7c9319bbe1aa\") " pod="openshift-dns/dns-default-f9llk"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.926052 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-s665q"]
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.927881 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-wgq25"]
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.931724 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-w7tjv"]
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.931799 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhk7p\" (UniqueName: \"kubernetes.io/projected/8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d-kube-api-access-fhk7p\") pod \"ingress-canary-bjlhz\" (UID: \"8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d\") " pod="openshift-ingress-canary/ingress-canary-bjlhz"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.941037 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.956916 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/adc5270c-e6f1-4f6d-b6db-62395196bc1c-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-lz8fs\" (UID: \"adc5270c-e6f1-4f6d-b6db-62395196bc1c\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.967446 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js"
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.974306 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:05:57 crc kubenswrapper[4774]: E1121 14:05:57.974414 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.474391387 +0000 UTC m=+149.126590646 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.974622 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:05:57 crc kubenswrapper[4774]: E1121 14:05:57.974956 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.474947943 +0000 UTC m=+149.127147202 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:05:57 crc kubenswrapper[4774]: I1121 14:05:57.977877 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq5dz\" (UniqueName: \"kubernetes.io/projected/4aea8951-3939-4012-966c-b0571f992df4-kube-api-access-mq5dz\") pod \"marketplace-operator-79b997595-gsdtl\" (UID: \"4aea8951-3939-4012-966c-b0571f992df4\") " pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.002079 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz9dr\" (UniqueName: \"kubernetes.io/projected/f262d7bf-02db-4d70-8c81-af3e592ae0d4-kube-api-access-bz9dr\") pod \"service-ca-9c57cc56f-gdvrm\" (UID: \"f262d7bf-02db-4d70-8c81-af3e592ae0d4\") " pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.013178 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.013544 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lclw9\" (UniqueName: \"kubernetes.io/projected/be63cc1d-3001-4622-93c0-ec885e5134d9-kube-api-access-lclw9\") pod \"machine-config-server-7qsfc\" (UID: \"be63cc1d-3001-4622-93c0-ec885e5134d9\") " pod="openshift-machine-config-operator/machine-config-server-7qsfc"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.017958 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.032642 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.039992 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.049216 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.049897 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvhcm\" (UniqueName: \"kubernetes.io/projected/1b4aca26-5092-42ac-ac83-9bee708dbfcf-kube-api-access-bvhcm\") pod \"machine-config-controller-84d6567774-7psfz\" (UID: \"1b4aca26-5092-42ac-ac83-9bee708dbfcf\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.060121 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.063336 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddzd2\" (UniqueName: \"kubernetes.io/projected/1cff40e3-a3a2-4cfb-9cf1-a30cbae71000-kube-api-access-ddzd2\") pod \"packageserver-d55dfcdfc-jpftc\" (UID: \"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.072365 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2279k\" (UniqueName: \"kubernetes.io/projected/aa95d68b-3894-42cf-9af0-18b2575250c4-kube-api-access-2279k\") pod \"collect-profiles-29395560-7hdr9\" (UID: \"aa95d68b-3894-42cf-9af0-18b2575250c4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.072864 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.076231 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:05:58 crc kubenswrapper[4774]: E1121 14:05:58.076767 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.576744863 +0000 UTC m=+149.228944112 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:05:58 crc kubenswrapper[4774]: W1121 14:05:58.076865 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6c4e0bf_1d46_41a8_9b64_fdcb10025225.slice/crio-22851e9a17b61b347bb280275b20d940352222d610a8ea5ccc18236e3312fa18 WatchSource:0}: Error finding container 22851e9a17b61b347bb280275b20d940352222d610a8ea5ccc18236e3312fa18: Status 404 returned error can't find the container with id 22851e9a17b61b347bb280275b20d940352222d610a8ea5ccc18236e3312fa18
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.081243 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.094360 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv"
Nov 21 14:05:58 crc kubenswrapper[4774]: W1121 14:05:58.109041 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd40ed0ef_54d5_4a6e_abdf_117f35add216.slice/crio-12589fec73837bbc4d84a6daced537296d9e53fdc5661774e6e4f10a9ba16548 WatchSource:0}: Error finding container 12589fec73837bbc4d84a6daced537296d9e53fdc5661774e6e4f10a9ba16548: Status 404 returned error can't find the container with id 12589fec73837bbc4d84a6daced537296d9e53fdc5661774e6e4f10a9ba16548
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.124577 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp"]
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.126767 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.152481 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.153000 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.155810 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr"]
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.162606 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-bjlhz"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.175700 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-7qsfc"
Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.176474 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-f9llk"
Need to start a new one" pod="openshift-dns/dns-default-f9llk" Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.177257 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:58 crc kubenswrapper[4774]: E1121 14:05:58.177595 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.677581725 +0000 UTC m=+149.329780984 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.182267 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp"] Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.278924 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:05:58 crc kubenswrapper[4774]: E1121 14:05:58.279112 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.779074346 +0000 UTC m=+149.431273605 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.279387 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:58 crc kubenswrapper[4774]: E1121 14:05:58.279730 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-21 14:05:58.779719594 +0000 UTC m=+149.431918853 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:58 crc kubenswrapper[4774]: W1121 14:05:58.293036 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4252b250_3577_4347_a8f6_0da47a0da0b7.slice/crio-5ee0a7e6be2dac483b9b2bba2d1cd9c28cf6426b40e9e817802e3fb3ee3c69a4 WatchSource:0}: Error finding container 5ee0a7e6be2dac483b9b2bba2d1cd9c28cf6426b40e9e817802e3fb3ee3c69a4: Status 404 returned error can't find the container with id 5ee0a7e6be2dac483b9b2bba2d1cd9c28cf6426b40e9e817802e3fb3ee3c69a4 Nov 21 14:05:58 crc kubenswrapper[4774]: W1121 14:05:58.306096 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod223a69f9_6da6_49f6_8dc6_791fdb76a205.slice/crio-5a859cbb874430ed40ff6c6adb091d23cf5a4087fff11e84f372d11af7eaa070 WatchSource:0}: Error finding container 5a859cbb874430ed40ff6c6adb091d23cf5a4087fff11e84f372d11af7eaa070: Status 404 returned error can't find the container with id 5a859cbb874430ed40ff6c6adb091d23cf5a4087fff11e84f372d11af7eaa070 Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.370511 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-wgq25" event={"ID":"a6c4e0bf-1d46-41a8-9b64-fdcb10025225","Type":"ContainerStarted","Data":"22851e9a17b61b347bb280275b20d940352222d610a8ea5ccc18236e3312fa18"} Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.374988 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-rmzf9" event={"ID":"7c1cea49-d382-4985-b4f3-4ec4a0ec52da","Type":"ContainerStarted","Data":"e26ccbd4a183d604f73e55ea0cb60c3514a98f097ca85732ceb7b7665a7e1f46"} Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.375036 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-rmzf9" event={"ID":"7c1cea49-d382-4985-b4f3-4ec4a0ec52da","Type":"ContainerStarted","Data":"6a445dc06d636d4001f076f4f79649c6a988194300fa5d32e2da3ce100abbecd"} Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.378797 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" event={"ID":"223a69f9-6da6-49f6-8dc6-791fdb76a205","Type":"ContainerStarted","Data":"5a859cbb874430ed40ff6c6adb091d23cf5a4087fff11e84f372d11af7eaa070"} Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.382440 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:05:58 crc kubenswrapper[4774]: E1121 14:05:58.382899 4774 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:58.882882473 +0000 UTC m=+149.535081732 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.383042 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-w7tjv" event={"ID":"b94e7447-7c8a-4f4e-9507-689f1500605c","Type":"ContainerStarted","Data":"f947e1763846ee89c51c09cc4d4b74cf15386b9610b51c8f020f6309350d8064"} Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.393531 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" event={"ID":"c0aad507-e343-416e-b043-2a9af1baa0c7","Type":"ContainerStarted","Data":"e3e5f34f07e6a65314e87868d5799a9d9ada80f8de34c9b6c344f9d378c823c2"} Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.393598 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" event={"ID":"c0aad507-e343-416e-b043-2a9af1baa0c7","Type":"ContainerStarted","Data":"5f2e3c40e144fa4d1df1a3dc89e5927807fd6fea67f671358c2d0eea445b3e0b"} Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.394454 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" event={"ID":"4252b250-3577-4347-a8f6-0da47a0da0b7","Type":"ContainerStarted","Data":"5ee0a7e6be2dac483b9b2bba2d1cd9c28cf6426b40e9e817802e3fb3ee3c69a4"} Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.395992 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-s665q" event={"ID":"d40ed0ef-54d5-4a6e-abdf-117f35add216","Type":"ContainerStarted","Data":"12589fec73837bbc4d84a6daced537296d9e53fdc5661774e6e4f10a9ba16548"} Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.412193 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-4rc7m"] Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.426419 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-nmdw5"] Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.487022 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:58 crc kubenswrapper[4774]: E1121 14:05:58.488977 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-21 14:05:58.988962744 +0000 UTC m=+149.641161993 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:58 crc kubenswrapper[4774]: W1121 14:05:58.557708 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1c533952_b089_4c49_b4dc_a969c08022b9.slice/crio-653beff13ebc598c8189f5d281bb3ad4cc24b173bf041e79253ddc1414a08f9c WatchSource:0}: Error finding container 653beff13ebc598c8189f5d281bb3ad4cc24b173bf041e79253ddc1414a08f9c: Status 404 returned error can't find the container with id 653beff13ebc598c8189f5d281bb3ad4cc24b173bf041e79253ddc1414a08f9c Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.588524 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:05:58 crc kubenswrapper[4774]: E1121 14:05:58.588933 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:59.088913851 +0000 UTC m=+149.741113120 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.626930 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hb96t"] Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.631377 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5"] Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.650081 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-rmzf9" Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.690597 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:58 crc kubenswrapper[4774]: E1121 14:05:58.691120 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:05:59.191103932 +0000 UTC m=+149.843303191 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.702389 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs"] Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.767245 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.767589 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.793794 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:05:58 crc kubenswrapper[4774]: E1121 14:05:58.794312 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:59.294284191 +0000 UTC m=+149.946483450 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.895157 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.895210 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.895251 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.895273 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.895333 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:58 crc kubenswrapper[4774]: E1121 14:05:58.895712 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:05:59.39569992 +0000 UTC m=+150.047899179 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.896848 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.908113 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.908435 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:58 crc kubenswrapper[4774]: I1121 14:05:58.909697 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.000834 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:05:59 crc kubenswrapper[4774]: E1121 14:05:59.001531 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:59.501509923 +0000 UTC m=+150.153709192 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.102564 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:59 crc kubenswrapper[4774]: E1121 14:05:59.102981 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:05:59.602969763 +0000 UTC m=+150.255169022 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.115396 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.137246 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.143338 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.204950 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:05:59 crc kubenswrapper[4774]: E1121 14:05:59.205278 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:59.705252637 +0000 UTC m=+150.357451886 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.307155 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:59 crc kubenswrapper[4774]: E1121 14:05:59.308733 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:05:59.808717934 +0000 UTC m=+150.460917193 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.340770 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-zsxng"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.347886 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.349284 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-rmzf9" podStartSLOduration=128.349263225 podStartE2EDuration="2m8.349263225s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:59.340461205 +0000 UTC m=+149.992660464" watchObservedRunningTime="2025-11-21 14:05:59.349263225 +0000 UTC m=+150.001462484" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.375777 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.391310 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.394679 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.404734 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-d965l"] Nov 21 14:05:59 crc kubenswrapper[4774]: 
I1121 14:05:59.408022 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:05:59 crc kubenswrapper[4774]: E1121 14:05:59.408357 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:05:59.908343522 +0000 UTC m=+150.560542781 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.427382 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr" event={"ID":"68d64bbb-e62d-4d42-b89c-3e84d14a0d27","Type":"ContainerStarted","Data":"947967b3468b7ec8227177f6d93e7b2c786b1f88a3d176f159bcf8e4b86ced2c"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.427445 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr" event={"ID":"68d64bbb-e62d-4d42-b89c-3e84d14a0d27","Type":"ContainerStarted","Data":"b87648f2f127de2eb41e8282fed3f134dbe604a5e2f3d3db91bc56b1fd8fbd27"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.427457 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr" event={"ID":"68d64bbb-e62d-4d42-b89c-3e84d14a0d27","Type":"ContainerStarted","Data":"a360d8264ad1df45911b616924bbe9169b77c726a8e4546c7bbabc18c17d5b75"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.460804 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" event={"ID":"565e5abf-8d99-4427-a923-0270e2080164","Type":"ContainerStarted","Data":"cfed21b4378dd83ea368605585c52cb22b51a170b349b82bfaca30c00b310d66"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.463114 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" event={"ID":"4252b250-3577-4347-a8f6-0da47a0da0b7","Type":"ContainerStarted","Data":"12c4cd3ed6d462a6cd28128173bce384f38ec40e04f3e71b2646043903e2430a"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.473612 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hb96t" event={"ID":"a6c40568-f5bb-48c3-bc00-a5b78c663270","Type":"ContainerStarted","Data":"6150429c92f24c8605a0677639742cf238365b22a9b697c51c50d9aed61107ce"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.473646 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hb96t" 
event={"ID":"a6c40568-f5bb-48c3-bc00-a5b78c663270","Type":"ContainerStarted","Data":"75c0168b5a2acf36f0217cd381450f770b7729d30950948dbd65d2062634fed6"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.479116 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-mx8jr" podStartSLOduration=129.47909329 podStartE2EDuration="2m9.47909329s" podCreationTimestamp="2025-11-21 14:03:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:59.471774072 +0000 UTC m=+150.123973371" watchObservedRunningTime="2025-11-21 14:05:59.47909329 +0000 UTC m=+150.131292589" Nov 21 14:05:59 crc kubenswrapper[4774]: W1121 14:05:59.481719 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99ffd227_1170_4aa8_8232_519d2b605f26.slice/crio-22e6381a590278034e7f127aa2ca178877e06572fca44353ade6750220d7e584 WatchSource:0}: Error finding container 22e6381a590278034e7f127aa2ca178877e06572fca44353ade6750220d7e584: Status 404 returned error can't find the container with id 22e6381a590278034e7f127aa2ca178877e06572fca44353ade6750220d7e584 Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.499522 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-skbsp" podStartSLOduration=128.499504589 podStartE2EDuration="2m8.499504589s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:59.497430271 +0000 UTC m=+150.149629550" watchObservedRunningTime="2025-11-21 14:05:59.499504589 +0000 UTC m=+150.151703848" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.501395 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" event={"ID":"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2","Type":"ContainerStarted","Data":"d7523e1b3299cd8fe8becd0ab758db7245307350e9880146210965d8653f6036"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.501421 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" event={"ID":"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2","Type":"ContainerStarted","Data":"c6e02ab9d0442aa02a7c350c87ea5e59f47a364af7413825959923f5f40b4b07"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.505220 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" event={"ID":"c0aad507-e343-416e-b043-2a9af1baa0c7","Type":"ContainerStarted","Data":"eefbc41d8a0b1817012664b094c99e0e9289d7036a0c352e45bf14ba2e261411"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.511445 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:59 crc kubenswrapper[4774]: E1121 14:05:59.512677 4774 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:00.012658303 +0000 UTC m=+150.664857562 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.536748 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" event={"ID":"cf55b393-ef41-4c5f-94d8-f3e829eca612","Type":"ContainerStarted","Data":"8b359029214b4a79fea825e51341990d04034be594917eca7cc5852c08d00ee7"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.545866 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-s665q" event={"ID":"d40ed0ef-54d5-4a6e-abdf-117f35add216","Type":"ContainerStarted","Data":"6ed52d41fe1f36c3760ccc0ea2c4045cd9fd7be4d30aebdf043c77a331273e49"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.546594 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pbvh7" podStartSLOduration=129.546575086 podStartE2EDuration="2m9.546575086s" podCreationTimestamp="2025-11-21 14:03:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:59.546413181 +0000 UTC m=+150.198612440" watchObservedRunningTime="2025-11-21 14:05:59.546575086 +0000 UTC m=+150.198774345" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.547307 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-s665q" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.558583 4774 generic.go:334] "Generic (PLEG): container finished" podID="a6c4e0bf-1d46-41a8-9b64-fdcb10025225" containerID="31ce0357c93ac8e7b318b97665088a0cfa43adc8a3e704e30776e8250343dd6e" exitCode=0 Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.558667 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-wgq25" event={"ID":"a6c4e0bf-1d46-41a8-9b64-fdcb10025225","Type":"ContainerDied","Data":"31ce0357c93ac8e7b318b97665088a0cfa43adc8a3e704e30776e8250343dd6e"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.574201 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-w7tjv" event={"ID":"b94e7447-7c8a-4f4e-9507-689f1500605c","Type":"ContainerStarted","Data":"ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.580909 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-7qsfc" event={"ID":"be63cc1d-3001-4622-93c0-ec885e5134d9","Type":"ContainerStarted","Data":"058434e9c6a0f2daca0470273f21f6f1c8863af645d0ce2b5e40460c7a0681ca"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.580954 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-server-7qsfc" event={"ID":"be63cc1d-3001-4622-93c0-ec885e5134d9","Type":"ContainerStarted","Data":"ce085530eace420ac3c17ff3ef9e4847d3e81179b0b3406c26d31a5123758444"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.592010 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-s665q" podStartSLOduration=128.591990985 podStartE2EDuration="2m8.591990985s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:59.568890989 +0000 UTC m=+150.221090268" watchObservedRunningTime="2025-11-21 14:05:59.591990985 +0000 UTC m=+150.244190244" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.593591 4774 patch_prober.go:28] interesting pod/console-operator-58897d9998-s665q container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.593618 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4rc7m" event={"ID":"1c533952-b089-4c49-b4dc-a969c08022b9","Type":"ContainerStarted","Data":"31628deae1a7c50cc0974efd3d2d6322ddeafa196c32e23563ff74748de1877b"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.593662 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4rc7m" event={"ID":"1c533952-b089-4c49-b4dc-a969c08022b9","Type":"ContainerStarted","Data":"653beff13ebc598c8189f5d281bb3ad4cc24b173bf041e79253ddc1414a08f9c"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.593655 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-s665q" podUID="d40ed0ef-54d5-4a6e-abdf-117f35add216" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.595350 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-4rc7m" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.601315 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.601365 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.609785 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" event={"ID":"223a69f9-6da6-49f6-8dc6-791fdb76a205","Type":"ContainerStarted","Data":"a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.610577 4774 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.620350 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.620405 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.622033 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-7qsfc" podStartSLOduration=5.622022707 podStartE2EDuration="5.622022707s" podCreationTimestamp="2025-11-21 14:05:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:59.621468901 +0000 UTC m=+150.273668160" watchObservedRunningTime="2025-11-21 14:05:59.622022707 +0000 UTC m=+150.274221966" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.624418 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:05:59 crc kubenswrapper[4774]: E1121 14:05:59.626426 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:00.126402052 +0000 UTC m=+150.778601311 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.642076 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" event={"ID":"8e451f31-029f-4072-847d-4ac2d4452ece","Type":"ContainerStarted","Data":"6c9e2e94238176dd5493c503c6022d662338739bc7046c08c8b645d70107a676"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.642126 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" event={"ID":"8e451f31-029f-4072-847d-4ac2d4452ece","Type":"ContainerStarted","Data":"d1b81dc12e13106b36263fe6a885274cef24432758d36d2f3b1e98040e29027f"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.662015 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-w7tjv" podStartSLOduration=128.661996642 podStartE2EDuration="2m8.661996642s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:59.661712914 +0000 UTC m=+150.313912183" watchObservedRunningTime="2025-11-21 14:05:59.661996642 +0000 UTC m=+150.314195901" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.662513 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 21 14:05:59 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld Nov 21 14:05:59 crc kubenswrapper[4774]: [+]process-running ok Nov 21 14:05:59 crc kubenswrapper[4774]: healthz check failed Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.662553 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.671736 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" event={"ID":"86527758-2544-4192-8e14-64e1194a024e","Type":"ContainerStarted","Data":"b9008ec470df3b1f201e809c8df2ef6b640d1869903f30cd1c17a738951c0c2f"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.671788 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" event={"ID":"86527758-2544-4192-8e14-64e1194a024e","Type":"ContainerStarted","Data":"16cc61225496cfb3d5377d892ba1fef4036552e87e79b1efd9060fc05b713da3"} Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.716619 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-4rc7m" podStartSLOduration=128.716600262 podStartE2EDuration="2m8.716600262s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:59.715311015 +0000 UTC m=+150.367510284" watchObservedRunningTime="2025-11-21 14:05:59.716600262 +0000 UTC m=+150.368799521" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.727617 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:59 crc kubenswrapper[4774]: E1121 14:05:59.729133 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:00.229118217 +0000 UTC m=+150.881317476 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.758900 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.783397 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vlhqx"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.806851 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" podStartSLOduration=127.806831073 podStartE2EDuration="2m7.806831073s" podCreationTimestamp="2025-11-21 14:03:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:59.759551001 +0000 UTC m=+150.411750260" watchObservedRunningTime="2025-11-21 14:05:59.806831073 +0000 UTC m=+150.459030332" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.808605 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-89bbs" podStartSLOduration=128.808593173 podStartE2EDuration="2m8.808593173s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:59.792039953 +0000 UTC m=+150.444239222" watchObservedRunningTime="2025-11-21 14:05:59.808593173 +0000 UTC m=+150.460792432" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.838158 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gsdtl"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.838799 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:05:59 crc kubenswrapper[4774]: E1121 14:05:59.840458 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:00.340409496 +0000 UTC m=+150.992608765 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:59 crc kubenswrapper[4774]: W1121 14:05:59.855792 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6ded279c_1738_42b5_8828_e4883c3756bf.slice/crio-d9df8fc16539adaaee18a6d0a57781317b839d300ee2810684b72006a3d13871 WatchSource:0}: Error finding container d9df8fc16539adaaee18a6d0a57781317b839d300ee2810684b72006a3d13871: Status 404 returned error can't find the container with id d9df8fc16539adaaee18a6d0a57781317b839d300ee2810684b72006a3d13871 Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.855970 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6txj8"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.869899 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-8h89j"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.876788 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gdvrm"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.898090 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.902697 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.908948 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.915994 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.916075 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-nmdw5" podStartSLOduration=128.916060024 podStartE2EDuration="2m8.916060024s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:05:59.849292729 +0000 UTC m=+150.501491988" watchObservedRunningTime="2025-11-21 14:05:59.916060024 +0000 UTC m=+150.568259283" Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 
14:05:59.928090 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-79zhf"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.930877 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.943205 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:05:59 crc kubenswrapper[4774]: E1121 14:05:59.947190 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:00.447171857 +0000 UTC m=+151.099371116 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.953971 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-zpzzf"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.962261 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.983304 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m"] Nov 21 14:05:59 crc kubenswrapper[4774]: I1121 14:05:59.993642 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-f9llk"] Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.007197 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-bjlhz"] Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.022174 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6"] Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.036659 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs"] Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.057718 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:00 crc kubenswrapper[4774]: E1121 14:06:00.058412 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:00.558391454 +0000 UTC m=+151.210590713 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:00 crc kubenswrapper[4774]: W1121 14:06:00.079144 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaa95d68b_3894_42cf_9af0_18b2575250c4.slice/crio-d366b8d35250bca8d57042d505f2d4ada6ec85fc3b0bdd4c961b8cc9f44de5a2 WatchSource:0}: Error finding container d366b8d35250bca8d57042d505f2d4ada6ec85fc3b0bdd4c961b8cc9f44de5a2: Status 404 returned error can't find the container with id d366b8d35250bca8d57042d505f2d4ada6ec85fc3b0bdd4c961b8cc9f44de5a2
Nov 21 14:06:00 crc kubenswrapper[4774]: W1121 14:06:00.082087 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod827301f1_4fae_491e_bcf5_7c9319bbe1aa.slice/crio-66faa93d57b19bd81c51469da708a38d968af5c5a4cc1674c0a60c812def4237 WatchSource:0}: Error finding container 66faa93d57b19bd81c51469da708a38d968af5c5a4cc1674c0a60c812def4237: Status 404 returned error can't find the container with id 66faa93d57b19bd81c51469da708a38d968af5c5a4cc1674c0a60c812def4237
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.110902 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-gfhfj"]
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.161655 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:00 crc kubenswrapper[4774]: E1121 14:06:00.162019 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:00.662003555 +0000 UTC m=+151.314202804 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.192803 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-mcznv"]
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.211939 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp"
Nov 21 14:06:00 crc kubenswrapper[4774]: W1121 14:06:00.239559 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9c60662_2ab1_49a1_8fda_0103d3f8cc78.slice/crio-672aa46f8f7eb65baafc2d80accf79fb3bb2c2577230af19884ab131d9e8cacd WatchSource:0}: Error finding container 672aa46f8f7eb65baafc2d80accf79fb3bb2c2577230af19884ab131d9e8cacd: Status 404 returned error can't find the container with id 672aa46f8f7eb65baafc2d80accf79fb3bb2c2577230af19884ab131d9e8cacd
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.264256 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:00 crc kubenswrapper[4774]: E1121 14:06:00.264427 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:00.764401041 +0000 UTC m=+151.416600300 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.264758 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:00 crc kubenswrapper[4774]: E1121 14:06:00.265233 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:00.765217484 +0000 UTC m=+151.417416743 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.377759 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:00 crc kubenswrapper[4774]: E1121 14:06:00.378204 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:00.878186811 +0000 UTC m=+151.530386070 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.480127 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:00 crc kubenswrapper[4774]: E1121 14:06:00.480665 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:00.980648809 +0000 UTC m=+151.632848068 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.581772 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:00 crc kubenswrapper[4774]: E1121 14:06:00.581984 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:01.081950355 +0000 UTC m=+151.734149614 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.582174 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:00 crc kubenswrapper[4774]: E1121 14:06:00.582555 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:01.082547622 +0000 UTC m=+151.734746881 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.655622 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 14:06:00 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld
Nov 21 14:06:00 crc kubenswrapper[4774]: [+]process-running ok
Nov 21 14:06:00 crc kubenswrapper[4774]: healthz check failed
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.656114 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.683311 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:00 crc kubenswrapper[4774]: E1121 14:06:00.683633 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:01.18359077 +0000 UTC m=+151.835790019 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.683770 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:00 crc kubenswrapper[4774]: E1121 14:06:00.684358 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:01.184348742 +0000 UTC m=+151.836548001 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.694763 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" event={"ID":"cc8ceff5-9c76-4521-a560-d9e6424c93f8","Type":"ContainerStarted","Data":"92e97f1ab2fc6ab8e95513562d425d9a1a016e2766833b14e61ca2f8e24fa9a8"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.707344 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" event={"ID":"516ab72d-be26-41a3-8f34-2fce0bf4febb","Type":"ContainerStarted","Data":"4e74fe91c6ab54033fd8833458567569d29f4b2c5b73edf0a016dea1912cd6e7"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.707401 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" event={"ID":"516ab72d-be26-41a3-8f34-2fce0bf4febb","Type":"ContainerStarted","Data":"fc254fa6ef3295a648da0c92099f748825cc9087b2b0d21ee4f287323397321a"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.709786 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l"
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.709903 4774 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-d965l container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body=
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.709940 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" podUID="516ab72d-be26-41a3-8f34-2fce0bf4febb" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused"
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.716858 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"92d78d2b1b017f11043c470b6a48b8f61e1006e6a1a1b87858a1a800f00dc80a"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.724807 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" event={"ID":"cf55b393-ef41-4c5f-94d8-f3e829eca612","Type":"ContainerStarted","Data":"1d6e5a989417f4ffb59f9ed03b1fdb283258387bc636ff4ab4558d016dd47056"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.773242 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm" event={"ID":"f262d7bf-02db-4d70-8c81-af3e592ae0d4","Type":"ContainerStarted","Data":"522a8c62b3b92b307be453006faa6b25dd5859573b05810604c8d7e466e44ef7"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.782085 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6" event={"ID":"c9c60662-2ab1-49a1-8fda-0103d3f8cc78","Type":"ContainerStarted","Data":"672aa46f8f7eb65baafc2d80accf79fb3bb2c2577230af19884ab131d9e8cacd"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.786870 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:00 crc kubenswrapper[4774]: E1121 14:06:00.787241 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:01.287210761 +0000 UTC m=+151.939410020 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.805008 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" event={"ID":"577947c3-d018-41ae-9c69-5443707c1073","Type":"ContainerStarted","Data":"220f591a44521e7f168fac10f58214fe2384c715fd7c66f21b296ed3344e6332"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.844144 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-wgq25" event={"ID":"a6c4e0bf-1d46-41a8-9b64-fdcb10025225","Type":"ContainerStarted","Data":"9044e3f8d4d8544c577aa9f459ba3a0ed5c0e1f8b46bb75ee482935823a22bba"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.862324 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv" event={"ID":"968f0685-b314-4b9f-86d0-0e39704e8083","Type":"ContainerStarted","Data":"2d30472c19e09955fb261ff6891a713f3bce698a3d54da9de32088c224594c3b"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.869440 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" event={"ID":"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000","Type":"ContainerStarted","Data":"2f2ebc46f9114ad0d8b4239486ee01f554738a3c2bf93b8eb313744cfe14c3d6"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.870448 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc"
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.873438 4774 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jpftc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:5443/healthz\": dial tcp 10.217.0.42:5443: connect: connection refused" start-of-body=
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.873510 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" podUID="1cff40e3-a3a2-4cfb-9cf1-a30cbae71000" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.42:5443/healthz\": dial tcp 10.217.0.42:5443: connect: connection refused"
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.874877 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs" event={"ID":"adc5270c-e6f1-4f6d-b6db-62395196bc1c","Type":"ContainerStarted","Data":"510868feca711a00abd413f54f7982e14ccd349be4487c0f94b5c4c123864a21"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.893893 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js" event={"ID":"fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb","Type":"ContainerStarted","Data":"38764657d07d00ae81316c987dd19e51fa368bf847ccf8afb94e94a4052431f0"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.894685 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:00 crc kubenswrapper[4774]: E1121 14:06:00.895217 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:01.395199687 +0000 UTC m=+152.047398946 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.911477 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" event={"ID":"d32e138f-edc8-459c-8cbf-9d1a07be8e67","Type":"ContainerStarted","Data":"5599fe230b0adf2b4bc2070995d7bda0f487472b40d8b1b938dbafcddd5e0cfc"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.928459 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-bjlhz" event={"ID":"8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d","Type":"ContainerStarted","Data":"957c798247e797ae832e0591073112aedbc6e08867cccaf43cc8f68f1ce12fd2"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.933846 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"7b498914508cf6ad773fd9021181acf1f948b7794d1b4ee54384bfc4ad7177e0"}
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.952001 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mkldg" podStartSLOduration=129.951972618 podStartE2EDuration="2m9.951972618s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:00.926546967 +0000 UTC m=+151.578746226" watchObservedRunningTime="2025-11-21 14:06:00.951972618 +0000 UTC m=+151.604171877"
Nov 21 14:06:00 crc kubenswrapper[4774]: I1121 14:06:00.992873 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hb96t" event={"ID":"a6c40568-f5bb-48c3-bc00-a5b78c663270","Type":"ContainerStarted","Data":"a8d532f2f10d8a7f5466a51ad7f7e81428780a7bc779ccb4a0fe8edcb3dc64b3"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.033623 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.034206 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" podStartSLOduration=130.034179032 podStartE2EDuration="2m10.034179032s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:00.980313833 +0000 UTC m=+151.632513112" watchObservedRunningTime="2025-11-21 14:06:01.034179032 +0000 UTC m=+151.686378381"
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.034620 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:01.534602484 +0000 UTC m=+152.186801743 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.035525 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" podStartSLOduration=130.03551708 podStartE2EDuration="2m10.03551708s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:01.030560039 +0000 UTC m=+151.682759298" watchObservedRunningTime="2025-11-21 14:06:01.03551708 +0000 UTC m=+151.687716339"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.038840 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" event={"ID":"4aea8951-3939-4012-966c-b0571f992df4","Type":"ContainerStarted","Data":"0ab8bfd562278b6f2e2ea1867bad7db6edea514d49125d3fb051c0d7013176b8"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.041312 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.054221 4774 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-gsdtl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body=
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.054331 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" podUID="4aea8951-3939-4012-966c-b0571f992df4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.096348 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-79zhf" event={"ID":"15b8faa6-1708-463a-9371-033ee86fd845","Type":"ContainerStarted","Data":"d2d0f024ed566c4699be5c57bbdb984e08386c481acbf8e29b7f08c180ae47ad"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.106503 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" podStartSLOduration=130.106423803 podStartE2EDuration="2m10.106423803s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:01.10209604 +0000 UTC m=+151.754295299" watchObservedRunningTime="2025-11-21 14:06:01.106423803 +0000 UTC m=+151.758623082"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.127333 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"da6a4a70dfd1471c57d5a96192b96c77f3d072b811f81a5c73880cc28a87e100"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.141616 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.146504 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:01.646457189 +0000 UTC m=+152.298656448 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.169714 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-hb96t" podStartSLOduration=130.169685478 podStartE2EDuration="2m10.169685478s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:01.145250595 +0000 UTC m=+151.797449854" watchObservedRunningTime="2025-11-21 14:06:01.169685478 +0000 UTC m=+151.821884797"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.186086 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz" event={"ID":"1b4aca26-5092-42ac-ac83-9bee708dbfcf","Type":"ContainerStarted","Data":"7e60363c239f826b2ce5a1975a0b438551ac96bc0240ab465ef662903ac49d8c"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.194238 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t" event={"ID":"cc511127-ef34-4387-986d-4d1228a730d4","Type":"ContainerStarted","Data":"a6d6c2d9c21dd8afcb367874b077c13590c4bd4e6be38379a94d4f373dada4b7"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.194532 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.200879 4774 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-8kj7t container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body=
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.200935 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t" podUID="cc511127-ef34-4387-986d-4d1228a730d4" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.245793 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.246037 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:01.746005925 +0000 UTC m=+152.398205184 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.246299 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.247642 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:01.747632591 +0000 UTC m=+152.399831850 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.253643 4774 generic.go:334] "Generic (PLEG): container finished" podID="565e5abf-8d99-4427-a923-0270e2080164" containerID="2c544d2fae224aad243d034e0d78577780c6da6cfebdaf135735b5f766e2bee0" exitCode=0
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.254742 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" event={"ID":"565e5abf-8d99-4427-a923-0270e2080164","Type":"ContainerDied","Data":"2c544d2fae224aad243d034e0d78577780c6da6cfebdaf135735b5f766e2bee0"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.291772 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" event={"ID":"6ded279c-1738-42b5-8828-e4883c3756bf","Type":"ContainerStarted","Data":"d9df8fc16539adaaee18a6d0a57781317b839d300ee2810684b72006a3d13871"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.292268 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t" podStartSLOduration=130.292211256 podStartE2EDuration="2m10.292211256s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:01.224317639 +0000 UTC m=+151.876516898" watchObservedRunningTime="2025-11-21 14:06:01.292211256 +0000 UTC m=+151.944410515"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.304936 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" event={"ID":"ffd390c3-23b8-4d65-a346-47b2ccb6b917","Type":"ContainerStarted","Data":"38d727b534d9de5cee53874c6040cee56c8ff2c6cc6364c25f784516d5bc1a8d"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.304993 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" event={"ID":"ffd390c3-23b8-4d65-a346-47b2ccb6b917","Type":"ContainerStarted","Data":"c18eac2865b9666c5ca3c7a3327b96b5b412d17dfa8ad15b2ff5031d86fa4d0b"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.315485 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6txj8" event={"ID":"99f783ba-3348-491e-849d-51149e55f7cc","Type":"ContainerStarted","Data":"06ab4c8c0f2fb217baf18d182cbd5bd08581f110fbb66d0ff6a89d440f42859a"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.324863 4774 generic.go:334] "Generic (PLEG): container finished" podID="f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2" containerID="d7523e1b3299cd8fe8becd0ab758db7245307350e9880146210965d8653f6036" exitCode=0
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.325240 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" event={"ID":"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2","Type":"ContainerDied","Data":"d7523e1b3299cd8fe8becd0ab758db7245307350e9880146210965d8653f6036"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.325297 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" event={"ID":"f871bfd8-4d9b-42ea-b6ff-ed75f4ea58e2","Type":"ContainerStarted","Data":"6ca3dccf94b7eadc93d38e6cb4f6c8dbadd2d6dbcaba707c71c410c76931b40a"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.325663 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9dv6b" podStartSLOduration=130.325649306 podStartE2EDuration="2m10.325649306s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:01.32191469 +0000 UTC m=+151.974113949" watchObservedRunningTime="2025-11-21 14:06:01.325649306 +0000 UTC m=+151.977848575"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.336941 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" event={"ID":"99ffd227-1170-4aa8-8232-519d2b605f26","Type":"ContainerStarted","Data":"217fc2a9dd62ba1ad36ef294bcdc05c6975928300985a68c03a848fe4302efe9"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.337016 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" event={"ID":"99ffd227-1170-4aa8-8232-519d2b605f26","Type":"ContainerStarted","Data":"22e6381a590278034e7f127aa2ca178877e06572fca44353ade6750220d7e584"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.347254 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.351081 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:01.851050567 +0000 UTC m=+152.503249836 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.372531 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" podStartSLOduration=129.372506106 podStartE2EDuration="2m9.372506106s" podCreationTimestamp="2025-11-21 14:03:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:01.357203801 +0000 UTC m=+152.009403070" watchObservedRunningTime="2025-11-21 14:06:01.372506106 +0000 UTC m=+152.024705375"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.379659 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" event={"ID":"a518d994-29bd-43a1-9dcb-870dd7d0ecdf","Type":"ContainerStarted","Data":"b8df6bfb928cbb113ca50a4e65704f0ab5caaf26831515ea57a242c0157b8cab"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.379719 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" event={"ID":"a518d994-29bd-43a1-9dcb-870dd7d0ecdf","Type":"ContainerStarted","Data":"d1a7aebb9aacb211d067b79454b814ee20eed4b7318f6150950420f0c0c2fcf4"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.392202 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9" event={"ID":"c3717d22-e2b1-427b-8585-9ba3daa3b61c","Type":"ContainerStarted","Data":"fb8d99e748dab075da01bd6388764ebb76114b9de5d151ff61c0ec42cefa4723"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.407539 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-f9llk" event={"ID":"827301f1-4fae-491e-bcf5-7c9319bbe1aa","Type":"ContainerStarted","Data":"66faa93d57b19bd81c51469da708a38d968af5c5a4cc1674c0a60c812def4237"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.420456 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx" event={"ID":"efe34db3-8e05-439e-b576-50c0ee864dda","Type":"ContainerStarted","Data":"b531932dc2ce983b19ce4fce5984817a63538e18146d5ac183a4ab3d0e4d1f56"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.420513 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx" event={"ID":"efe34db3-8e05-439e-b576-50c0ee864dda","Type":"ContainerStarted","Data":"3c13c288b1f3b8ebeea976ae7f078054778224a2ab47ce671520f2999575ebed"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.425954 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gmzc4" podStartSLOduration=130.421235739 podStartE2EDuration="2m10.421235739s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:01.390423504 +0000 UTC m=+152.042622773" watchObservedRunningTime="2025-11-21 14:06:01.421235739 +0000 UTC m=+152.073434998"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.427284 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" podStartSLOduration=130.42726809 podStartE2EDuration="2m10.42726809s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:01.420193179 +0000 UTC m=+152.072392448" watchObservedRunningTime="2025-11-21 14:06:01.42726809 +0000 UTC m=+152.079467349"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.445423 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24" event={"ID":"735fdae4-98f5-4826-9d0f-92e784b93645","Type":"ContainerStarted","Data":"31703fb1432428c1cb6909489f406676143419f2cf7c93231a9edb3a1604ac66"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.446064 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9" podStartSLOduration=130.445680983 podStartE2EDuration="2m10.445680983s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:01.443035758 +0000 UTC m=+152.095235017" watchObservedRunningTime="2025-11-21 14:06:01.445680983 +0000 UTC m=+152.097880242"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.450720 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.452059 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9" event={"ID":"aa95d68b-3894-42cf-9af0-18b2575250c4","Type":"ContainerStarted","Data":"d366b8d35250bca8d57042d505f2d4ada6ec85fc3b0bdd4c961b8cc9f44de5a2"}
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.462671 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.462744 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.462935 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:01.962915072 +0000 UTC m=+152.615114331 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.477366 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-s665q"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.501154 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-vlhqx" podStartSLOduration=130.501134187 podStartE2EDuration="2m10.501134187s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:01.47339853 +0000 UTC m=+152.125597799" watchObservedRunningTime="2025-11-21 14:06:01.501134187 +0000 UTC m=+152.153333446"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.506783 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9" podStartSLOduration=130.506758677 podStartE2EDuration="2m10.506758677s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:01.499645705 +0000 UTC m=+152.151844964" watchObservedRunningTime="2025-11-21 14:06:01.506758677 +0000 UTC m=+152.158957956"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.551919 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.553073 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.053051081 +0000 UTC m=+152.705250340 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.653926 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.654345 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.154329946 +0000 UTC m=+152.806529205 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.664551 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 14:06:01 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld
Nov 21 14:06:01 crc kubenswrapper[4774]: [+]process-running ok
Nov 21 14:06:01 crc kubenswrapper[4774]: healthz check failed
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.664612 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.754679 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.754860 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.254830808 +0000 UTC m=+152.907030067 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.755383 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.756127 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.256114195 +0000 UTC m=+152.908313454 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.858424 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.858651 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.358614514 +0000 UTC m=+153.010813773 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.863320 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.864348 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.364318736 +0000 UTC m=+153.016517995 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:01 crc kubenswrapper[4774]: I1121 14:06:01.964302 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:01 crc kubenswrapper[4774]: E1121 14:06:01.965025 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.465005855 +0000 UTC m=+153.117205114 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.066627 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:02 crc kubenswrapper[4774]: E1121 14:06:02.066970 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.566955558 +0000 UTC m=+153.219154817 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.174256 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:02 crc kubenswrapper[4774]: E1121 14:06:02.174744 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.674728468 +0000 UTC m=+153.326927727 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.276390 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:02 crc kubenswrapper[4774]: E1121 14:06:02.276707 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.776692102 +0000 UTC m=+153.428891361 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.377000 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 21 14:06:02 crc kubenswrapper[4774]: E1121 14:06:02.377224 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.877192445 +0000 UTC m=+153.529391714 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.377338 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:02 crc kubenswrapper[4774]: E1121 14:06:02.377671 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.877658788 +0000 UTC m=+153.529858047 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.462617 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"e97366ca77c627e76c160a20d75e4aba2286a318ebdac868eee6053730bfc870"}
Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.464803 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-wgq25" event={"ID":"a6c4e0bf-1d46-41a8-9b64-fdcb10025225","Type":"ContainerStarted","Data":"cae0e80d954d379920a458adf4f0f67e807e776d55c5cc8e7b1eb60a0d4a9dda"}
Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.466142 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" event={"ID":"d32e138f-edc8-459c-8cbf-9d1a07be8e67","Type":"ContainerStarted","Data":"94d5ecfcda9f93e96aa4f6d52ee9596218a488c3b68d61dc6609c0d1417ca8a4"}
Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.467677 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" event={"ID":"cc8ceff5-9c76-4521-a560-d9e6424c93f8","Type":"ContainerStarted","Data":"51c3eb95b0d6be37dd22ef2b1aca7b3fe2681b15a586447a0bcf9e98febd5354"}
Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.467854 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj"
Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.469030 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"
event={"ID":"aa95d68b-3894-42cf-9af0-18b2575250c4","Type":"ContainerStarted","Data":"3c9d0d6839eff2aad41b795c949bb392c5cf7779ab4b8bcbb72507cbd56b42e3"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.469390 4774 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-gfhfj container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.21:6443/healthz\": dial tcp 10.217.0.21:6443: connect: connection refused" start-of-body= Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.469481 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" podUID="cc8ceff5-9c76-4521-a560-d9e6424c93f8" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.21:6443/healthz\": dial tcp 10.217.0.21:6443: connect: connection refused" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.470605 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" event={"ID":"565e5abf-8d99-4427-a923-0270e2080164","Type":"ContainerStarted","Data":"3e869c14737e97b3adc420e5531666de2a00f4fe093a216f798323fa431efc48"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.470745 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.471926 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"92b3bc877945ed3b4d61fc99379ff9ccef4feefb0776b6aaa22f840416bbb11a"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.472942 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" event={"ID":"4aea8951-3939-4012-966c-b0571f992df4","Type":"ContainerStarted","Data":"2114d486f39320a0ed11e3f3fcde82aece19974603afe5268eae303440a5f017"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.473542 4774 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-gsdtl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body= Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.473605 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" podUID="4aea8951-3939-4012-966c-b0571f992df4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.474261 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" event={"ID":"6ded279c-1738-42b5-8828-e4883c3756bf","Type":"ContainerStarted","Data":"299f54220e11fed8d06177277d60d5f97bb75be13a8cecb68b04c2f36c231d4d"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.474311 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" event={"ID":"6ded279c-1738-42b5-8828-e4883c3756bf","Type":"ContainerStarted","Data":"f13864e1f5afc377a10b56398c87d4a4240ca6737f56f766feb890f7e60b9d04"} Nov 21 
14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.476692 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6txj8" event={"ID":"99f783ba-3348-491e-849d-51149e55f7cc","Type":"ContainerStarted","Data":"76dc21a667abd2c6adde438c7d9d289969400641e464f5f7b8c06b90f274d06e"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.476730 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6txj8" event={"ID":"99f783ba-3348-491e-849d-51149e55f7cc","Type":"ContainerStarted","Data":"8780f5cc1e18f29d9b0b9f4bfdf10131003f5d6b3c95c3c5f5a2a0e3c34c1f9a"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.477863 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.477883 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" event={"ID":"577947c3-d018-41ae-9c69-5443707c1073","Type":"ContainerStarted","Data":"7971798fe7cd62894b0f74ac47d1cdee76d8968e3b54188c4ed0afdae2feb5ef"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.478305 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" Nov 21 14:06:02 crc kubenswrapper[4774]: E1121 14:06:02.478570 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:02.978550532 +0000 UTC m=+153.630749791 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.479525 4774 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-vbl6m container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.479568 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" podUID="577947c3-d018-41ae-9c69-5443707c1073" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.479703 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-bjlhz" event={"ID":"8bb3bcd7-68a3-421d-8f13-bbd481b4bd6d","Type":"ContainerStarted","Data":"7af2a77c68f2fab716208150e85cafaba2ae3af4a234c57be86eb936d2109501"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.496471 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js" event={"ID":"fc59f9fd-1ad7-4bd5-966c-9a77ee22fdeb","Type":"ContainerStarted","Data":"092168b09c6f81d2479913a99134fac065b212d0431246b6a8dfbd25aee981e9"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.504510 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-c8jn9" event={"ID":"c3717d22-e2b1-427b-8585-9ba3daa3b61c","Type":"ContainerStarted","Data":"9d6c35c509568100968dcf244651e2fccf23d7370be634421c43eacf5c6e8ba3"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.514003 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-f9llk" event={"ID":"827301f1-4fae-491e-bcf5-7c9319bbe1aa","Type":"ContainerStarted","Data":"1358410394b4bb67a514e7192b90d221ad4213b902f102f69a2a5979fd56d35a"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.514066 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-f9llk" event={"ID":"827301f1-4fae-491e-bcf5-7c9319bbe1aa","Type":"ContainerStarted","Data":"6f67b463d790d0c57bacf46dd6bbfb7a85659f9dcdd2998ffe1c36e28bee6f4e"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.514194 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-f9llk" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.521499 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv" event={"ID":"968f0685-b314-4b9f-86d0-0e39704e8083","Type":"ContainerStarted","Data":"831e628ddf4023e5d52795c0a42b219c3e7026c2023f93449e4768a59ef6d2ef"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.526645 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24" event={"ID":"735fdae4-98f5-4826-9d0f-92e784b93645","Type":"ContainerStarted","Data":"7496ac689a3669c5dd7b002ff372d99134724ad9f9f04f8a1bd257efff1932b3"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.526693 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24" event={"ID":"735fdae4-98f5-4826-9d0f-92e784b93645","Type":"ContainerStarted","Data":"ff9e847074109f697cbf830c2c9e7c74d41979aa00f85852d3e3076b7958efb6"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.529954 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz" event={"ID":"1b4aca26-5092-42ac-ac83-9bee708dbfcf","Type":"ContainerStarted","Data":"d17bb0a7000ce4a554450ecb1c7e46316ce7791bce121e4897d38e6f12058054"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.530011 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz" event={"ID":"1b4aca26-5092-42ac-ac83-9bee708dbfcf","Type":"ContainerStarted","Data":"e72015302ef9ba03d615733121cf5f45997e750913d72b13d2533bf051b0f01d"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.533201 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" event={"ID":"1cff40e3-a3a2-4cfb-9cf1-a30cbae71000","Type":"ContainerStarted","Data":"923a85b7a291bc1987400e75a182866b0bd37a1f8bce2c61baec06a348747280"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.551216 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-79zhf" event={"ID":"15b8faa6-1708-463a-9371-033ee86fd845","Type":"ContainerStarted","Data":"3a904e631afffb9cc206b896659acc174c567ee9997f41ba58ca5b25b5124513"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.551282 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-79zhf" event={"ID":"15b8faa6-1708-463a-9371-033ee86fd845","Type":"ContainerStarted","Data":"1747597c9c6ddd7492aca5c54c45f870379a19a789b5ad63c992449c1d24afe5"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.557971 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.558284 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.567170 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6" event={"ID":"c9c60662-2ab1-49a1-8fda-0103d3f8cc78","Type":"ContainerStarted","Data":"19abdf18b7e837abc75e2aab8fb735e1bc4a0431ca78611f421de31f0ee27f63"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.567218 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6" event={"ID":"c9c60662-2ab1-49a1-8fda-0103d3f8cc78","Type":"ContainerStarted","Data":"c10e4de7a2956de10dd5f2b04cd4704d71630ed137347c47a2096c5b6df9fb41"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.567410 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.580966 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.581865 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs" event={"ID":"adc5270c-e6f1-4f6d-b6db-62395196bc1c","Type":"ContainerStarted","Data":"0fd0cb03ace374c6f5ef8cdcd59cf94acc86ab45b220c1a43ef734ef9a87a9b5"} Nov 21 14:06:02 crc kubenswrapper[4774]: E1121 14:06:02.589409 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:03.089388748 +0000 UTC m=+153.741588047 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.589645 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t" event={"ID":"cc511127-ef34-4387-986d-4d1228a730d4","Type":"ContainerStarted","Data":"d5ef9d833fb74ac33f475672e83a41e245769d057bbc8eddb4ff4c6df4c6e528"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.591283 4774 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-8kj7t container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.591323 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t" podUID="cc511127-ef34-4387-986d-4d1228a730d4" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.619317 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm" event={"ID":"f262d7bf-02db-4d70-8c81-af3e592ae0d4","Type":"ContainerStarted","Data":"a8b487104263e3bd284f4be7f1a485c8523d8b91cf64233077afc8f7305a12ae"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.644121 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"7171182dc8d0ca8c9cf7da7ef601f5ca875a30f01f5e785d41538c48138f954d"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.644779 4774 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.655755 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 21 14:06:02 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld Nov 21 14:06:02 crc kubenswrapper[4774]: [+]process-running ok Nov 21 14:06:02 crc kubenswrapper[4774]: healthz check failed Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.655805 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.668611 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-9mrfc" event={"ID":"a518d994-29bd-43a1-9dcb-870dd7d0ecdf","Type":"ContainerStarted","Data":"d1fa881b147bad7eae46de821bb15e21a51aa975c0962bd1ecb598d892bfb421"} Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.683512 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:02 crc kubenswrapper[4774]: E1121 14:06:02.684432 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:03.184417096 +0000 UTC m=+153.836616355 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.691577 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.706902 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-wgq25" podStartSLOduration=131.706876383 podStartE2EDuration="2m11.706876383s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:02.655465414 +0000 UTC m=+153.307664683" watchObservedRunningTime="2025-11-21 14:06:02.706876383 +0000 UTC m=+153.359075652" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.792157 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.793250 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.798765 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6txj8" podStartSLOduration=131.798745461 podStartE2EDuration="2m11.798745461s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:02.712180104 +0000 UTC m=+153.364379373" watchObservedRunningTime="2025-11-21 14:06:02.798745461 +0000 UTC m=+153.450944720" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.800493 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-8h89j" podStartSLOduration=131.80048826 podStartE2EDuration="2m11.80048826s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:02.792224356 +0000 UTC m=+153.444423615" watchObservedRunningTime="2025-11-21 14:06:02.80048826 +0000 UTC m=+153.452687509" Nov 21 14:06:02 crc kubenswrapper[4774]: E1121 14:06:02.810699 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:03.31067518 +0000 UTC m=+153.962874439 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.894093 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:02 crc kubenswrapper[4774]: E1121 14:06:02.895147 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:03.395126997 +0000 UTC m=+154.047326256 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.954159 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng" podStartSLOduration=132.954140982 podStartE2EDuration="2m12.954140982s" podCreationTimestamp="2025-11-21 14:03:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:02.90263766 +0000 UTC m=+153.554836919" watchObservedRunningTime="2025-11-21 14:06:02.954140982 +0000 UTC m=+153.606340241" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.955234 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" podStartSLOduration=131.955229243 podStartE2EDuration="2m11.955229243s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:02.952467855 +0000 UTC m=+153.604667114" watchObservedRunningTime="2025-11-21 14:06:02.955229243 +0000 UTC m=+153.607428502" Nov 21 14:06:02 crc kubenswrapper[4774]: I1121 14:06:02.995389 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:02 crc kubenswrapper[4774]: E1121 14:06:02.995795 4774 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:03.495782674 +0000 UTC m=+154.147981933 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.015271 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" podStartSLOduration=132.015252887 podStartE2EDuration="2m12.015252887s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:03.012740065 +0000 UTC m=+153.664939314" watchObservedRunningTime="2025-11-21 14:06:03.015252887 +0000 UTC m=+153.667452146" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.097463 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:03 crc kubenswrapper[4774]: E1121 14:06:03.097950 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:03.597933624 +0000 UTC m=+154.250132883 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.112462 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-bjlhz" podStartSLOduration=9.112431445 podStartE2EDuration="9.112431445s" podCreationTimestamp="2025-11-21 14:05:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:03.050664522 +0000 UTC m=+153.702863781" watchObservedRunningTime="2025-11-21 14:06:03.112431445 +0000 UTC m=+153.764630704" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.115221 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-kj4js" podStartSLOduration=132.115209454 podStartE2EDuration="2m12.115209454s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:03.111310714 +0000 UTC m=+153.763509973" watchObservedRunningTime="2025-11-21 14:06:03.115209454 +0000 UTC m=+153.767408713" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.148022 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.194214 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-mcznv" podStartSLOduration=131.194189656 podStartE2EDuration="2m11.194189656s" podCreationTimestamp="2025-11-21 14:03:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:03.190698377 +0000 UTC m=+153.842897636" watchObservedRunningTime="2025-11-21 14:06:03.194189656 +0000 UTC m=+153.846388915" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.199517 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:03 crc kubenswrapper[4774]: E1121 14:06:03.199860 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:03.699847207 +0000 UTC m=+154.352046696 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.243976 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-gbv24" podStartSLOduration=132.243945428 podStartE2EDuration="2m12.243945428s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:03.242524138 +0000 UTC m=+153.894723397" watchObservedRunningTime="2025-11-21 14:06:03.243945428 +0000 UTC m=+153.896144687" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.288647 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-lz8fs" podStartSLOduration=132.288631477 podStartE2EDuration="2m12.288631477s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:03.287899256 +0000 UTC m=+153.940098515" watchObservedRunningTime="2025-11-21 14:06:03.288631477 +0000 UTC m=+153.940830736" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.300444 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:03 crc kubenswrapper[4774]: E1121 14:06:03.300702 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:03.800687219 +0000 UTC m=+154.452886478 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.402334 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:03 crc kubenswrapper[4774]: E1121 14:06:03.402671 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:03.902656684 +0000 UTC m=+154.554855943 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.503347 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:03 crc kubenswrapper[4774]: E1121 14:06:03.503765 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.003745633 +0000 UTC m=+154.655944892 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.533726 4774 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jpftc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.534145 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" podUID="1cff40e3-a3a2-4cfb-9cf1-a30cbae71000" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.42:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.539626 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-f9llk" podStartSLOduration=9.539603231 podStartE2EDuration="9.539603231s" podCreationTimestamp="2025-11-21 14:05:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:03.496871168 +0000 UTC m=+154.149070427" watchObservedRunningTime="2025-11-21 14:06:03.539603231 +0000 UTC m=+154.191802490" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.540795 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-gdvrm" podStartSLOduration=131.540786455 podStartE2EDuration="2m11.540786455s" podCreationTimestamp="2025-11-21 14:03:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:03.536957666 +0000 UTC m=+154.189156925" watchObservedRunningTime="2025-11-21 14:06:03.540786455 +0000 UTC m=+154.192985714" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.605046 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:03 crc kubenswrapper[4774]: E1121 14:06:03.605408 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.105393389 +0000 UTC m=+154.757592648 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.631034 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-7psfz" podStartSLOduration=132.631018466 podStartE2EDuration="2m12.631018466s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:03.564964981 +0000 UTC m=+154.217164240" watchObservedRunningTime="2025-11-21 14:06:03.631018466 +0000 UTC m=+154.283217725" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.653871 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6" podStartSLOduration=132.653853184 podStartE2EDuration="2m12.653853184s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:03.653411912 +0000 UTC m=+154.305611181" watchObservedRunningTime="2025-11-21 14:06:03.653853184 +0000 UTC m=+154.306052443" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.663002 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 21 14:06:03 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld Nov 21 14:06:03 crc kubenswrapper[4774]: [+]process-running ok Nov 21 14:06:03 crc kubenswrapper[4774]: healthz check failed Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.663059 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.705598 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:03 crc kubenswrapper[4774]: E1121 14:06:03.706096 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.206077727 +0000 UTC m=+154.858276986 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.709590 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" event={"ID":"d32e138f-edc8-459c-8cbf-9d1a07be8e67","Type":"ContainerStarted","Data":"b2e5398a9c20c9cc5da4281b80f43263383af70a3a28b1a160598c0ca9fb5b28"} Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.723325 4774 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-gsdtl container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body= Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.723381 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" podUID="4aea8951-3939-4012-966c-b0571f992df4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.748722 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-79zhf" podStartSLOduration=132.748699496 podStartE2EDuration="2m12.748699496s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:03.695352002 +0000 UTC m=+154.347551291" watchObservedRunningTime="2025-11-21 14:06:03.748699496 +0000 UTC m=+154.400898765" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.749036 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-44rg5" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.762545 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-8kj7t" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.763714 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vbl6m" Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.807514 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:03 crc kubenswrapper[4774]: E1121 14:06:03.818749 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.318733904 +0000 UTC m=+154.970933163 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:03 crc kubenswrapper[4774]: I1121 14:06:03.909580 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:03 crc kubenswrapper[4774]: E1121 14:06:03.910090 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.410073426 +0000 UTC m=+155.062272685 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.010978 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.011400 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.511383422 +0000 UTC m=+155.163582681 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.111799 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.111987 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.611945857 +0000 UTC m=+155.264145116 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.112573 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.113048 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.613023987 +0000 UTC m=+155.265238857 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.213659 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.213897 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.71386648 +0000 UTC m=+155.366065739 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.213975 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.214304 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.714292602 +0000 UTC m=+155.366491861 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.314932 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.315075 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.815048282 +0000 UTC m=+155.467247541 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.315332 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.315712 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.81569907 +0000 UTC m=+155.467898329 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.417004 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.417257 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.917216282 +0000 UTC m=+155.569415541 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.417365 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.417792 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:04.917766678 +0000 UTC m=+155.569965937 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.518318 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.518769 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:05.018748664 +0000 UTC m=+155.670947923 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.619998 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.620467 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:05.120448351 +0000 UTC m=+155.772647610 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.653051 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 21 14:06:04 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld Nov 21 14:06:04 crc kubenswrapper[4774]: [+]process-running ok Nov 21 14:06:04 crc kubenswrapper[4774]: healthz check failed Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.653160 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.707231 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.711699 4774 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jpftc container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.711760 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc" podUID="1cff40e3-a3a2-4cfb-9cf1-a30cbae71000" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.42:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.716794 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" event={"ID":"d32e138f-edc8-459c-8cbf-9d1a07be8e67","Type":"ContainerStarted","Data":"1c5ff5ac3e4f2e3409cc71504cbcaa48d6b69fd9f7e8e64f1082ac9db2b68b56"} Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.721443 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.721846 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:05.221803118 +0000 UTC m=+155.874002377 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.823552 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.829111 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:05.329093774 +0000 UTC m=+155.981293043 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.898190 4774 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.924429 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:04 crc kubenswrapper[4774]: E1121 14:06:04.924917 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:05.424897583 +0000 UTC m=+156.077096862 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.935286 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zd7lj"] Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.936619 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.939201 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 21 14:06:04 crc kubenswrapper[4774]: I1121 14:06:04.960093 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zd7lj"] Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.025932 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.026032 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-utilities\") pod \"community-operators-zd7lj\" (UID: \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\") " pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.026056 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-catalog-content\") pod \"community-operators-zd7lj\" (UID: \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\") " pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.026085 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svs9t\" (UniqueName: \"kubernetes.io/projected/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-kube-api-access-svs9t\") pod \"community-operators-zd7lj\" (UID: \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\") " pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:06:05 crc kubenswrapper[4774]: E1121 14:06:05.026285 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:05.526268911 +0000 UTC m=+156.178468170 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.109263 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xt4q5"] Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.110694 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.114962 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.126993 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:05 crc kubenswrapper[4774]: E1121 14:06:05.127160 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:05.627137844 +0000 UTC m=+156.279337103 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.127219 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-utilities\") pod \"certified-operators-xt4q5\" (UID: \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\") " pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.127283 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.127332 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-catalog-content\") pod \"certified-operators-xt4q5\" (UID: \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\") " pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.127366 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-catalog-content\") pod \"community-operators-zd7lj\" (UID: \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\") " pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.127394 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-utilities\") pod \"community-operators-zd7lj\" (UID: \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\") " 
pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.127420 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhzrk\" (UniqueName: \"kubernetes.io/projected/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-kube-api-access-fhzrk\") pod \"certified-operators-xt4q5\" (UID: \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\") " pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.127448 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svs9t\" (UniqueName: \"kubernetes.io/projected/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-kube-api-access-svs9t\") pod \"community-operators-zd7lj\" (UID: \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\") " pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.128267 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xt4q5"] Nov 21 14:06:05 crc kubenswrapper[4774]: E1121 14:06:05.128314 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:05.628301687 +0000 UTC m=+156.280500946 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.128890 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-catalog-content\") pod \"community-operators-zd7lj\" (UID: \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\") " pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.129021 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-utilities\") pod \"community-operators-zd7lj\" (UID: \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\") " pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.154127 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svs9t\" (UniqueName: \"kubernetes.io/projected/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-kube-api-access-svs9t\") pod \"community-operators-zd7lj\" (UID: \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\") " pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.228595 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:05 crc kubenswrapper[4774]: E1121 14:06:05.228813 4774 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:05.728774519 +0000 UTC m=+156.380973778 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.229353 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-utilities\") pod \"certified-operators-xt4q5\" (UID: \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\") " pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.229430 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.229473 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-catalog-content\") pod \"certified-operators-xt4q5\" (UID: \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\") " pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.229500 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhzrk\" (UniqueName: \"kubernetes.io/projected/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-kube-api-access-fhzrk\") pod \"certified-operators-xt4q5\" (UID: \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\") " pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:06:05 crc kubenswrapper[4774]: E1121 14:06:05.230101 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:05.730088416 +0000 UTC m=+156.382287675 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.230126 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-utilities\") pod \"certified-operators-xt4q5\" (UID: \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\") " pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.230198 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-catalog-content\") pod \"certified-operators-xt4q5\" (UID: \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\") " pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.254805 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.262279 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhzrk\" (UniqueName: \"kubernetes.io/projected/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-kube-api-access-fhzrk\") pod \"certified-operators-xt4q5\" (UID: \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\") " pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.305336 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-h7j28"] Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.306580 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.330120 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:05 crc kubenswrapper[4774]: E1121 14:06:05.330338 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-21 14:06:05.830292891 +0000 UTC m=+156.482492150 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.330460 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.330554 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-catalog-content\") pod \"community-operators-h7j28\" (UID: \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\") " pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.330597 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9kbt\" (UniqueName: \"kubernetes.io/projected/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-kube-api-access-b9kbt\") pod \"community-operators-h7j28\" (UID: \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\") " pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.330624 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-utilities\") pod \"community-operators-h7j28\" (UID: \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\") " pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:06:05 crc kubenswrapper[4774]: E1121 14:06:05.331014 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-21 14:06:05.830997651 +0000 UTC m=+156.483196910 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pgwc4" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.332529 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h7j28"] Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.352368 4774 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-11-21T14:06:04.898225726Z","Handler":null,"Name":""} Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.372730 4774 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.372776 4774 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.432670 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.433038 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.433576 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-catalog-content\") pod \"community-operators-h7j28\" (UID: \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\") " pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.433615 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9kbt\" (UniqueName: \"kubernetes.io/projected/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-kube-api-access-b9kbt\") pod \"community-operators-h7j28\" (UID: \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\") " pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.433647 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-utilities\") pod \"community-operators-h7j28\" (UID: \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\") " pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.434339 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-utilities\") pod \"community-operators-h7j28\" (UID: \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\") " 
pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.434359 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-catalog-content\") pod \"community-operators-h7j28\" (UID: \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\") " pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.484596 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9kbt\" (UniqueName: \"kubernetes.io/projected/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-kube-api-access-b9kbt\") pod \"community-operators-h7j28\" (UID: \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\") " pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.521279 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lrb9s"] Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.522251 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.536383 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5x25\" (UniqueName: \"kubernetes.io/projected/47f45e51-ad6b-4fb3-8777-92d9879a72cb-kube-api-access-t5x25\") pod \"certified-operators-lrb9s\" (UID: \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\") " pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.536444 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47f45e51-ad6b-4fb3-8777-92d9879a72cb-utilities\") pod \"certified-operators-lrb9s\" (UID: \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\") " pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.536468 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47f45e51-ad6b-4fb3-8777-92d9879a72cb-catalog-content\") pod \"certified-operators-lrb9s\" (UID: \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\") " pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.586467 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.602227 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.603043 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.606203 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.606470 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.623492 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lrb9s"] Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.625336 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.642414 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.642551 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.642589 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5x25\" (UniqueName: \"kubernetes.io/projected/47f45e51-ad6b-4fb3-8777-92d9879a72cb-kube-api-access-t5x25\") pod \"certified-operators-lrb9s\" (UID: \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\") " pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.642610 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47f45e51-ad6b-4fb3-8777-92d9879a72cb-utilities\") pod \"certified-operators-lrb9s\" (UID: \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\") " pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.642642 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47f45e51-ad6b-4fb3-8777-92d9879a72cb-catalog-content\") pod \"certified-operators-lrb9s\" (UID: \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\") " pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.642665 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/653e9ec6-e80c-43cd-8d71-f194fc0e40fa-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"653e9ec6-e80c-43cd-8d71-f194fc0e40fa\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.642734 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/653e9ec6-e80c-43cd-8d71-f194fc0e40fa-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"653e9ec6-e80c-43cd-8d71-f194fc0e40fa\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 
14:06:05.643660 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47f45e51-ad6b-4fb3-8777-92d9879a72cb-utilities\") pod \"certified-operators-lrb9s\" (UID: \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\") " pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.643943 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47f45e51-ad6b-4fb3-8777-92d9879a72cb-catalog-content\") pod \"certified-operators-lrb9s\" (UID: \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\") " pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.676455 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.676711 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.678450 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 21 14:06:05 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld Nov 21 14:06:05 crc kubenswrapper[4774]: [+]process-running ok Nov 21 14:06:05 crc kubenswrapper[4774]: healthz check failed Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.678486 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.732079 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5x25\" (UniqueName: \"kubernetes.io/projected/47f45e51-ad6b-4fb3-8777-92d9879a72cb-kube-api-access-t5x25\") pod \"certified-operators-lrb9s\" (UID: \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\") " pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.758843 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/653e9ec6-e80c-43cd-8d71-f194fc0e40fa-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"653e9ec6-e80c-43cd-8d71-f194fc0e40fa\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.758955 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/653e9ec6-e80c-43cd-8d71-f194fc0e40fa-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"653e9ec6-e80c-43cd-8d71-f194fc0e40fa\") " 
pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.759466 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/653e9ec6-e80c-43cd-8d71-f194fc0e40fa-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"653e9ec6-e80c-43cd-8d71-f194fc0e40fa\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.780233 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" event={"ID":"d32e138f-edc8-459c-8cbf-9d1a07be8e67","Type":"ContainerStarted","Data":"25219b231424b9203d249322dcbf1a647afdc5ce2999a7f5192ed4de3a4c2804"} Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.786378 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/653e9ec6-e80c-43cd-8d71-f194fc0e40fa-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"653e9ec6-e80c-43cd-8d71-f194fc0e40fa\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.812883 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-zpzzf" podStartSLOduration=11.812806577 podStartE2EDuration="11.812806577s" podCreationTimestamp="2025-11-21 14:05:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:05.811064088 +0000 UTC m=+156.463263347" watchObservedRunningTime="2025-11-21 14:06:05.812806577 +0000 UTC m=+156.465005836" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.900539 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:06:05 crc kubenswrapper[4774]: I1121 14:06:05.973334 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pgwc4\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") " pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.036286 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.120665 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.121436 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zd7lj"] Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.269162 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.365180 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xt4q5"]
Nov 21 14:06:06 crc kubenswrapper[4774]: W1121 14:06:06.386065 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8349c9b_3b4f_4b5d_ab74_d70fcc3e9411.slice/crio-005e24275f47c825f8be76617a2dbf480fa024196b9700364f5f7d8ea06a3230 WatchSource:0}: Error finding container 005e24275f47c825f8be76617a2dbf480fa024196b9700364f5f7d8ea06a3230: Status 404 returned error can't find the container with id 005e24275f47c825f8be76617a2dbf480fa024196b9700364f5f7d8ea06a3230
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.518976 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-h7j28"]
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.563318 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.670057 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 14:06:06 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld
Nov 21 14:06:06 crc kubenswrapper[4774]: [+]process-running ok
Nov 21 14:06:06 crc kubenswrapper[4774]: healthz check failed
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.670112 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.733254 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-zsxng"
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.782772 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lrb9s"]
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.805321 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"653e9ec6-e80c-43cd-8d71-f194fc0e40fa","Type":"ContainerStarted","Data":"e621dd1d13b6906ed1dc45dda2009616a3a3b635a4aa77b254f67b82c99effab"}
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.813826 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xt4q5" event={"ID":"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411","Type":"ContainerStarted","Data":"b8ce4dc14e7577582eb7852d0655efcf66a21abffd81e266a315b61d2f29aa12"}
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.813870 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xt4q5" event={"ID":"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411","Type":"ContainerStarted","Data":"005e24275f47c825f8be76617a2dbf480fa024196b9700364f5f7d8ea06a3230"}
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.815975 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.826779 4774 generic.go:334] "Generic (PLEG): container finished" podID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" containerID="c57083f8663787da9f33f703979fbfcb4e4abe324070aee75e4a275bb00d1cbc" exitCode=0
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.826899 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zd7lj" event={"ID":"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a","Type":"ContainerDied","Data":"c57083f8663787da9f33f703979fbfcb4e4abe324070aee75e4a275bb00d1cbc"}
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.826933 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zd7lj" event={"ID":"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a","Type":"ContainerStarted","Data":"73536bdf25aea75d3fe81ffa320460732955869cf7422f7c5dfbc9f159c21ff5"}
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.871213 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h7j28" event={"ID":"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522","Type":"ContainerStarted","Data":"1e7d3601aa68e799ab4f4d951b44d48e4f862d708d5c3fd994032b3e1bb23f6c"}
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.926900 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q922n"]
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.928393 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q922n"
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.933196 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.944992 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pgwc4"]
Nov 21 14:06:06 crc kubenswrapper[4774]: I1121 14:06:06.980517 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q922n"]
Nov 21 14:06:07 crc kubenswrapper[4774]: W1121 14:06:06.995483 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1760383_3b9d_4c38_b474_75ec72a82819.slice/crio-aa3d40005519f42c657889685eed5a3779ef270b3ad0911876b637f707b100ad WatchSource:0}: Error finding container aa3d40005519f42c657889685eed5a3779ef270b3ad0911876b637f707b100ad: Status 404 returned error can't find the container with id aa3d40005519f42c657889685eed5a3779ef270b3ad0911876b637f707b100ad
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.025894 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-utilities\") pod \"redhat-marketplace-q922n\" (UID: \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\") " pod="openshift-marketplace/redhat-marketplace-q922n"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.026415 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-catalog-content\") pod \"redhat-marketplace-q922n\" (UID: \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\") " pod="openshift-marketplace/redhat-marketplace-q922n"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.026504 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kh44\" (UniqueName: \"kubernetes.io/projected/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-kube-api-access-8kh44\") pod \"redhat-marketplace-q922n\" (UID: \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\") " pod="openshift-marketplace/redhat-marketplace-q922n"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.127705 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kh44\" (UniqueName: \"kubernetes.io/projected/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-kube-api-access-8kh44\") pod \"redhat-marketplace-q922n\" (UID: \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\") " pod="openshift-marketplace/redhat-marketplace-q922n"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.127803 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-utilities\") pod \"redhat-marketplace-q922n\" (UID: \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\") " pod="openshift-marketplace/redhat-marketplace-q922n"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.127904 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-catalog-content\") pod \"redhat-marketplace-q922n\" (UID: \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\") " pod="openshift-marketplace/redhat-marketplace-q922n"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.128490 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-catalog-content\") pod \"redhat-marketplace-q922n\" (UID: \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\") " pod="openshift-marketplace/redhat-marketplace-q922n"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.128766 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-utilities\") pod \"redhat-marketplace-q922n\" (UID: \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\") " pod="openshift-marketplace/redhat-marketplace-q922n"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.153529 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kh44\" (UniqueName: \"kubernetes.io/projected/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-kube-api-access-8kh44\") pod \"redhat-marketplace-q922n\" (UID: \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\") " pod="openshift-marketplace/redhat-marketplace-q922n"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.297140 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-m7lsv"]
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.298458 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m7lsv"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.309440 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m7lsv"]
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.335326 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q922n"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.432798 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-w7tjv"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.433126 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-w7tjv"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.433263 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4x92\" (UniqueName: \"kubernetes.io/projected/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-kube-api-access-f4x92\") pod \"redhat-marketplace-m7lsv\" (UID: \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\") " pod="openshift-marketplace/redhat-marketplace-m7lsv"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.433347 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-utilities\") pod \"redhat-marketplace-m7lsv\" (UID: \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\") " pod="openshift-marketplace/redhat-marketplace-m7lsv"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.433479 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-catalog-content\") pod \"redhat-marketplace-m7lsv\" (UID: \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\") " pod="openshift-marketplace/redhat-marketplace-m7lsv"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.435191 4774 patch_prober.go:28] interesting pod/console-f9d7485db-w7tjv container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body=
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.435246 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-w7tjv" podUID="b94e7447-7c8a-4f4e-9507-689f1500605c" containerName="console" probeResult="failure" output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.446336 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-wgq25"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.446502 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-wgq25"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.453467 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-wgq25"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.525182 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.525240 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.526537 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.526586 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.535296 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-utilities\") pod \"redhat-marketplace-m7lsv\" (UID: \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\") " pod="openshift-marketplace/redhat-marketplace-m7lsv"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.535462 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-catalog-content\") pod \"redhat-marketplace-m7lsv\" (UID: \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\") " pod="openshift-marketplace/redhat-marketplace-m7lsv"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.535564 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4x92\" (UniqueName: \"kubernetes.io/projected/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-kube-api-access-f4x92\") pod \"redhat-marketplace-m7lsv\" (UID: \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\") " pod="openshift-marketplace/redhat-marketplace-m7lsv"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.537502 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-catalog-content\") pod \"redhat-marketplace-m7lsv\" (UID: \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\") " pod="openshift-marketplace/redhat-marketplace-m7lsv"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.538884 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-utilities\") pod \"redhat-marketplace-m7lsv\" (UID: \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\") " pod="openshift-marketplace/redhat-marketplace-m7lsv"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.567922 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q922n"]
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.568506 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4x92\" (UniqueName: \"kubernetes.io/projected/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-kube-api-access-f4x92\") pod \"redhat-marketplace-m7lsv\" (UID: \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\") " pod="openshift-marketplace/redhat-marketplace-m7lsv"
Nov 21 14:06:07 crc kubenswrapper[4774]: W1121 14:06:07.575119 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8600e8eb_acb2_4a1c_9aaf_53e02ddbe2c2.slice/crio-f9a49edc0454ab4fee1c38f1b177d5aa4c93df7184a9f046edd771556dba3cec WatchSource:0}: Error finding container f9a49edc0454ab4fee1c38f1b177d5aa4c93df7184a9f046edd771556dba3cec: Status 404 returned error can't find the container with id f9a49edc0454ab4fee1c38f1b177d5aa4c93df7184a9f046edd771556dba3cec
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.614065 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m7lsv"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.649206 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.654338 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 14:06:07 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld
Nov 21 14:06:07 crc kubenswrapper[4774]: [+]process-running ok
Nov 21 14:06:07 crc kubenswrapper[4774]: healthz check failed
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.654425 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.851238 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-m7lsv"]
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.884586 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q922n" event={"ID":"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2","Type":"ContainerStarted","Data":"8f08e873b8e1e711883e01a50283f7513d9eeaebadd417124bc9542aefc94985"}
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.884627 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q922n" event={"ID":"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2","Type":"ContainerStarted","Data":"f9a49edc0454ab4fee1c38f1b177d5aa4c93df7184a9f046edd771556dba3cec"}
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.909951 4774 generic.go:334] "Generic (PLEG): container finished" podID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" containerID="b8ce4dc14e7577582eb7852d0655efcf66a21abffd81e266a315b61d2f29aa12" exitCode=0
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.910065 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xt4q5" event={"ID":"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411","Type":"ContainerDied","Data":"b8ce4dc14e7577582eb7852d0655efcf66a21abffd81e266a315b61d2f29aa12"}
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.916780 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" event={"ID":"f1760383-3b9d-4c38-b474-75ec72a82819","Type":"ContainerStarted","Data":"a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd"}
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.916856 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" event={"ID":"f1760383-3b9d-4c38-b474-75ec72a82819","Type":"ContainerStarted","Data":"aa3d40005519f42c657889685eed5a3779ef270b3ad0911876b637f707b100ad"}
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.917148 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.927962 4774 generic.go:334] "Generic (PLEG): container finished" podID="aa95d68b-3894-42cf-9af0-18b2575250c4" containerID="3c9d0d6839eff2aad41b795c949bb392c5cf7779ab4b8bcbb72507cbd56b42e3" exitCode=0
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.928051 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9" event={"ID":"aa95d68b-3894-42cf-9af0-18b2575250c4","Type":"ContainerDied","Data":"3c9d0d6839eff2aad41b795c949bb392c5cf7779ab4b8bcbb72507cbd56b42e3"}
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.932069 4774 generic.go:334] "Generic (PLEG): container finished" podID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" containerID="48f350b9cc835725bad877b57431c6223f163c3e7e579648842b4043d38eac68" exitCode=0
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.932191 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h7j28" event={"ID":"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522","Type":"ContainerDied","Data":"48f350b9cc835725bad877b57431c6223f163c3e7e579648842b4043d38eac68"}
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.935585 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"653e9ec6-e80c-43cd-8d71-f194fc0e40fa","Type":"ContainerStarted","Data":"b16e38f3b32fe1464659699aee30da15a622058dc4311935cc214a610036493d"}
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.937947 4774 generic.go:334] "Generic (PLEG): container finished" podID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" containerID="4373ec705b3dc1fb67c8df8856024caff9942873e127b5ad8b8ee1cba61414b4" exitCode=0
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.938748 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lrb9s" event={"ID":"47f45e51-ad6b-4fb3-8777-92d9879a72cb","Type":"ContainerDied","Data":"4373ec705b3dc1fb67c8df8856024caff9942873e127b5ad8b8ee1cba61414b4"}
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.938779 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lrb9s" event={"ID":"47f45e51-ad6b-4fb3-8777-92d9879a72cb","Type":"ContainerStarted","Data":"a601e538acdbdb997cf29521c48401cf774aaeded8bcad2404ca50c2c2eeda32"}
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.945232 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-wgq25"
Nov 21 14:06:07 crc kubenswrapper[4774]: I1121 14:06:07.961134 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" podStartSLOduration=136.961116538 podStartE2EDuration="2m16.961116538s" podCreationTimestamp="2025-11-21 14:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:07.959927314 +0000 UTC m=+158.612126603" watchObservedRunningTime="2025-11-21 14:06:07.961116538 +0000 UTC m=+158.613315797"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.062239 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.062216698 podStartE2EDuration="3.062216698s" podCreationTimestamp="2025-11-21 14:06:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:06:08.060245082 +0000 UTC m=+158.712444341" watchObservedRunningTime="2025-11-21 14:06:08.062216698 +0000 UTC m=+158.714415957"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.162140 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jpftc"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.167740 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.302813 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-k42jk"]
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.306784 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k42jk"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.312553 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.323129 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k42jk"]
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.358370 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6226j\" (UniqueName: \"kubernetes.io/projected/fe5f4376-0558-4824-a2d8-119c74a082eb-kube-api-access-6226j\") pod \"redhat-operators-k42jk\" (UID: \"fe5f4376-0558-4824-a2d8-119c74a082eb\") " pod="openshift-marketplace/redhat-operators-k42jk"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.358463 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe5f4376-0558-4824-a2d8-119c74a082eb-catalog-content\") pod \"redhat-operators-k42jk\" (UID: \"fe5f4376-0558-4824-a2d8-119c74a082eb\") " pod="openshift-marketplace/redhat-operators-k42jk"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.358529 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe5f4376-0558-4824-a2d8-119c74a082eb-utilities\") pod \"redhat-operators-k42jk\" (UID: \"fe5f4376-0558-4824-a2d8-119c74a082eb\") " pod="openshift-marketplace/redhat-operators-k42jk"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.459858 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe5f4376-0558-4824-a2d8-119c74a082eb-catalog-content\") pod \"redhat-operators-k42jk\" (UID: \"fe5f4376-0558-4824-a2d8-119c74a082eb\") " pod="openshift-marketplace/redhat-operators-k42jk"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.459907 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe5f4376-0558-4824-a2d8-119c74a082eb-utilities\") pod \"redhat-operators-k42jk\" (UID: \"fe5f4376-0558-4824-a2d8-119c74a082eb\") " pod="openshift-marketplace/redhat-operators-k42jk"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.459981 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6226j\" (UniqueName: \"kubernetes.io/projected/fe5f4376-0558-4824-a2d8-119c74a082eb-kube-api-access-6226j\") pod \"redhat-operators-k42jk\" (UID: \"fe5f4376-0558-4824-a2d8-119c74a082eb\") " pod="openshift-marketplace/redhat-operators-k42jk"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.460439 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe5f4376-0558-4824-a2d8-119c74a082eb-catalog-content\") pod \"redhat-operators-k42jk\" (UID: \"fe5f4376-0558-4824-a2d8-119c74a082eb\") " pod="openshift-marketplace/redhat-operators-k42jk"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.460676 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe5f4376-0558-4824-a2d8-119c74a082eb-utilities\") pod \"redhat-operators-k42jk\" (UID: \"fe5f4376-0558-4824-a2d8-119c74a082eb\") " pod="openshift-marketplace/redhat-operators-k42jk"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.487938 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6226j\" (UniqueName: \"kubernetes.io/projected/fe5f4376-0558-4824-a2d8-119c74a082eb-kube-api-access-6226j\") pod \"redhat-operators-k42jk\" (UID: \"fe5f4376-0558-4824-a2d8-119c74a082eb\") " pod="openshift-marketplace/redhat-operators-k42jk"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.629482 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k42jk"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.653626 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 14:06:08 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld
Nov 21 14:06:08 crc kubenswrapper[4774]: [+]process-running ok
Nov 21 14:06:08 crc kubenswrapper[4774]: healthz check failed
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.653692 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.728435 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tnpl6"]
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.729845 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnpl6"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.755650 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tnpl6"]
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.871893 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7082230-9ac1-49e5-adb3-4a9178f9f796-utilities\") pod \"redhat-operators-tnpl6\" (UID: \"f7082230-9ac1-49e5-adb3-4a9178f9f796\") " pod="openshift-marketplace/redhat-operators-tnpl6"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.872413 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8g6z\" (UniqueName: \"kubernetes.io/projected/f7082230-9ac1-49e5-adb3-4a9178f9f796-kube-api-access-z8g6z\") pod \"redhat-operators-tnpl6\" (UID: \"f7082230-9ac1-49e5-adb3-4a9178f9f796\") " pod="openshift-marketplace/redhat-operators-tnpl6"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.872506 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7082230-9ac1-49e5-adb3-4a9178f9f796-catalog-content\") pod \"redhat-operators-tnpl6\" (UID: \"f7082230-9ac1-49e5-adb3-4a9178f9f796\") " pod="openshift-marketplace/redhat-operators-tnpl6"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.946386 4774 generic.go:334] "Generic (PLEG): container finished" podID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" containerID="0cf336bad906408f05ba69b5dd0d0a3cd0a5083f0d2a879fa0201ef853cb2bc3" exitCode=0
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.946482 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m7lsv" event={"ID":"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4","Type":"ContainerDied","Data":"0cf336bad906408f05ba69b5dd0d0a3cd0a5083f0d2a879fa0201ef853cb2bc3"}
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.946515 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m7lsv" event={"ID":"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4","Type":"ContainerStarted","Data":"86ac864af53183f329aeb1fa95a14370b73ea7e146fe2b16df4b35685af95902"}
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.951081 4774 generic.go:334] "Generic (PLEG): container finished" podID="653e9ec6-e80c-43cd-8d71-f194fc0e40fa" containerID="b16e38f3b32fe1464659699aee30da15a622058dc4311935cc214a610036493d" exitCode=0
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.951143 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"653e9ec6-e80c-43cd-8d71-f194fc0e40fa","Type":"ContainerDied","Data":"b16e38f3b32fe1464659699aee30da15a622058dc4311935cc214a610036493d"}
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.956167 4774 generic.go:334] "Generic (PLEG): container finished" podID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" containerID="8f08e873b8e1e711883e01a50283f7513d9eeaebadd417124bc9542aefc94985" exitCode=0
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.956417 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q922n" event={"ID":"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2","Type":"ContainerDied","Data":"8f08e873b8e1e711883e01a50283f7513d9eeaebadd417124bc9542aefc94985"}
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.975629 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7082230-9ac1-49e5-adb3-4a9178f9f796-catalog-content\") pod \"redhat-operators-tnpl6\" (UID: \"f7082230-9ac1-49e5-adb3-4a9178f9f796\") " pod="openshift-marketplace/redhat-operators-tnpl6"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.975722 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7082230-9ac1-49e5-adb3-4a9178f9f796-utilities\") pod \"redhat-operators-tnpl6\" (UID: \"f7082230-9ac1-49e5-adb3-4a9178f9f796\") " pod="openshift-marketplace/redhat-operators-tnpl6"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.975746 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8g6z\" (UniqueName: \"kubernetes.io/projected/f7082230-9ac1-49e5-adb3-4a9178f9f796-kube-api-access-z8g6z\") pod \"redhat-operators-tnpl6\" (UID: \"f7082230-9ac1-49e5-adb3-4a9178f9f796\") " pod="openshift-marketplace/redhat-operators-tnpl6"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.976291 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7082230-9ac1-49e5-adb3-4a9178f9f796-catalog-content\") pod \"redhat-operators-tnpl6\" (UID: \"f7082230-9ac1-49e5-adb3-4a9178f9f796\") " pod="openshift-marketplace/redhat-operators-tnpl6"
Nov 21 14:06:08 crc kubenswrapper[4774]: I1121 14:06:08.976541 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7082230-9ac1-49e5-adb3-4a9178f9f796-utilities\") pod \"redhat-operators-tnpl6\" (UID: \"f7082230-9ac1-49e5-adb3-4a9178f9f796\") " pod="openshift-marketplace/redhat-operators-tnpl6"
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.003356 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8g6z\" (UniqueName: \"kubernetes.io/projected/f7082230-9ac1-49e5-adb3-4a9178f9f796-kube-api-access-z8g6z\") pod \"redhat-operators-tnpl6\" (UID: \"f7082230-9ac1-49e5-adb3-4a9178f9f796\") " pod="openshift-marketplace/redhat-operators-tnpl6"
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.142185 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnpl6"
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.357155 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.372384 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k42jk"]
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.402552 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa95d68b-3894-42cf-9af0-18b2575250c4-config-volume\") pod \"aa95d68b-3894-42cf-9af0-18b2575250c4\" (UID: \"aa95d68b-3894-42cf-9af0-18b2575250c4\") "
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.402653 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa95d68b-3894-42cf-9af0-18b2575250c4-secret-volume\") pod \"aa95d68b-3894-42cf-9af0-18b2575250c4\" (UID: \"aa95d68b-3894-42cf-9af0-18b2575250c4\") "
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.402803 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2279k\" (UniqueName: \"kubernetes.io/projected/aa95d68b-3894-42cf-9af0-18b2575250c4-kube-api-access-2279k\") pod \"aa95d68b-3894-42cf-9af0-18b2575250c4\" (UID: \"aa95d68b-3894-42cf-9af0-18b2575250c4\") "
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.403445 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa95d68b-3894-42cf-9af0-18b2575250c4-config-volume" (OuterVolumeSpecName: "config-volume") pod "aa95d68b-3894-42cf-9af0-18b2575250c4" (UID: "aa95d68b-3894-42cf-9af0-18b2575250c4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.409483 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa95d68b-3894-42cf-9af0-18b2575250c4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "aa95d68b-3894-42cf-9af0-18b2575250c4" (UID: "aa95d68b-3894-42cf-9af0-18b2575250c4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.415882 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa95d68b-3894-42cf-9af0-18b2575250c4-kube-api-access-2279k" (OuterVolumeSpecName: "kube-api-access-2279k") pod "aa95d68b-3894-42cf-9af0-18b2575250c4" (UID: "aa95d68b-3894-42cf-9af0-18b2575250c4"). InnerVolumeSpecName "kube-api-access-2279k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.503972 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2279k\" (UniqueName: \"kubernetes.io/projected/aa95d68b-3894-42cf-9af0-18b2575250c4-kube-api-access-2279k\") on node \"crc\" DevicePath \"\""
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.504009 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/aa95d68b-3894-42cf-9af0-18b2575250c4-config-volume\") on node \"crc\" DevicePath \"\""
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.504019 4774 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/aa95d68b-3894-42cf-9af0-18b2575250c4-secret-volume\") on node \"crc\" DevicePath \"\""
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.540653 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tnpl6"]
Nov 21 14:06:09 crc kubenswrapper[4774]: W1121 14:06:09.556694 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7082230_9ac1_49e5_adb3_4a9178f9f796.slice/crio-87bdf4b9420b251dd50d584e4ccbf8dfad92f42ce8be121c1340f881675b4f8f WatchSource:0}: Error finding container 87bdf4b9420b251dd50d584e4ccbf8dfad92f42ce8be121c1340f881675b4f8f: Status 404 returned error can't find the container with id 87bdf4b9420b251dd50d584e4ccbf8dfad92f42ce8be121c1340f881675b4f8f
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.652804 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 14:06:09 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld
Nov 21 14:06:09 crc kubenswrapper[4774]: [+]process-running ok
Nov 21 14:06:09 crc kubenswrapper[4774]: healthz check failed
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.652895 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.966154 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnpl6" event={"ID":"f7082230-9ac1-49e5-adb3-4a9178f9f796","Type":"ContainerStarted","Data":"87bdf4b9420b251dd50d584e4ccbf8dfad92f42ce8be121c1340f881675b4f8f"}
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.969375 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k42jk" event={"ID":"fe5f4376-0558-4824-a2d8-119c74a082eb","Type":"ContainerStarted","Data":"9fad8a905c9c86561980a2e4259d35c7c10c9e3884bb768f096a7d0d388541fe"}
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.971716 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9" event={"ID":"aa95d68b-3894-42cf-9af0-18b2575250c4","Type":"ContainerDied","Data":"d366b8d35250bca8d57042d505f2d4ada6ec85fc3b0bdd4c961b8cc9f44de5a2"}
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.971754 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d366b8d35250bca8d57042d505f2d4ada6ec85fc3b0bdd4c961b8cc9f44de5a2"
Nov 21 14:06:09 crc kubenswrapper[4774]: I1121 14:06:09.971804 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.074125 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Nov 21 14:06:10 crc kubenswrapper[4774]: E1121 14:06:10.074427 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa95d68b-3894-42cf-9af0-18b2575250c4" containerName="collect-profiles"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.074448 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa95d68b-3894-42cf-9af0-18b2575250c4" containerName="collect-profiles"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.074588 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa95d68b-3894-42cf-9af0-18b2575250c4" containerName="collect-profiles"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.075159 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.080893 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.082056 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.084019 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.121968 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f43d5d1c-936d-4928-a347-657f630b6c6c-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f43d5d1c-936d-4928-a347-657f630b6c6c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.122042 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f43d5d1c-936d-4928-a347-657f630b6c6c-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f43d5d1c-936d-4928-a347-657f630b6c6c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.223096 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f43d5d1c-936d-4928-a347-657f630b6c6c-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f43d5d1c-936d-4928-a347-657f630b6c6c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.223140 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f43d5d1c-936d-4928-a347-657f630b6c6c-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f43d5d1c-936d-4928-a347-657f630b6c6c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.223680 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f43d5d1c-936d-4928-a347-657f630b6c6c-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f43d5d1c-936d-4928-a347-657f630b6c6c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.242176 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f43d5d1c-936d-4928-a347-657f630b6c6c-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f43d5d1c-936d-4928-a347-657f630b6c6c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.342213 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.406543 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.434516 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/653e9ec6-e80c-43cd-8d71-f194fc0e40fa-kube-api-access\") pod \"653e9ec6-e80c-43cd-8d71-f194fc0e40fa\" (UID: \"653e9ec6-e80c-43cd-8d71-f194fc0e40fa\") "
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.434684 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/653e9ec6-e80c-43cd-8d71-f194fc0e40fa-kubelet-dir\") pod \"653e9ec6-e80c-43cd-8d71-f194fc0e40fa\" (UID: \"653e9ec6-e80c-43cd-8d71-f194fc0e40fa\") "
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.434841 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/653e9ec6-e80c-43cd-8d71-f194fc0e40fa-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "653e9ec6-e80c-43cd-8d71-f194fc0e40fa" (UID: "653e9ec6-e80c-43cd-8d71-f194fc0e40fa"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.435598 4774 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/653e9ec6-e80c-43cd-8d71-f194fc0e40fa-kubelet-dir\") on node \"crc\" DevicePath \"\""
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.437764 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/653e9ec6-e80c-43cd-8d71-f194fc0e40fa-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "653e9ec6-e80c-43cd-8d71-f194fc0e40fa" (UID: "653e9ec6-e80c-43cd-8d71-f194fc0e40fa"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.537124 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/653e9ec6-e80c-43cd-8d71-f194fc0e40fa-kube-api-access\") on node \"crc\" DevicePath \"\""
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.656181 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 14:06:10 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld
Nov 21 14:06:10 crc kubenswrapper[4774]: [+]process-running ok
Nov 21 14:06:10 crc kubenswrapper[4774]: healthz check failed
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.656255 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.703640 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Nov 21 14:06:10 crc kubenswrapper[4774]: W1121 14:06:10.721784 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf43d5d1c_936d_4928_a347_657f630b6c6c.slice/crio-cf6a5cf986d7c38b04ee3dbcfc6ad77f1933f509d20f4cefa98a5e08798c0a75 WatchSource:0}: Error finding container cf6a5cf986d7c38b04ee3dbcfc6ad77f1933f509d20f4cefa98a5e08798c0a75: Status 404 returned error can't find the container with id cf6a5cf986d7c38b04ee3dbcfc6ad77f1933f509d20f4cefa98a5e08798c0a75
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.990689 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnpl6" event={"ID":"f7082230-9ac1-49e5-adb3-4a9178f9f796","Type":"ContainerStarted","Data":"dfac67ee8bb59cfe001b08af5a6081a1c259a4b6aa029819a8d2cb7ae96f9798"}
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.994807 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"653e9ec6-e80c-43cd-8d71-f194fc0e40fa","Type":"ContainerDied","Data":"e621dd1d13b6906ed1dc45dda2009616a3a3b635a4aa77b254f67b82c99effab"}
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.994864 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.994882 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e621dd1d13b6906ed1dc45dda2009616a3a3b635a4aa77b254f67b82c99effab"
Nov 21 14:06:10 crc kubenswrapper[4774]: I1121 14:06:10.999767 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k42jk" event={"ID":"fe5f4376-0558-4824-a2d8-119c74a082eb","Type":"ContainerStarted","Data":"f80abea7512fdcb241239e378640112fc083163b50216c2de33a818d946e29b4"}
Nov 21 14:06:11 crc kubenswrapper[4774]: I1121 14:06:11.012751 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f43d5d1c-936d-4928-a347-657f630b6c6c","Type":"ContainerStarted","Data":"cf6a5cf986d7c38b04ee3dbcfc6ad77f1933f509d20f4cefa98a5e08798c0a75"}
Nov 21 14:06:11 crc kubenswrapper[4774]: I1121 14:06:11.653553 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 14:06:11 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld
Nov 21 14:06:11 crc kubenswrapper[4774]: [+]process-running ok
Nov 21 14:06:11 crc kubenswrapper[4774]: healthz check failed
Nov 21 14:06:11 crc kubenswrapper[4774]: I1121 14:06:11.653623 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 14:06:12 crc kubenswrapper[4774]: I1121 14:06:12.026559 4774 generic.go:334] "Generic (PLEG): container finished" podID="f7082230-9ac1-49e5-adb3-4a9178f9f796" containerID="dfac67ee8bb59cfe001b08af5a6081a1c259a4b6aa029819a8d2cb7ae96f9798" exitCode=0
Nov 21 14:06:12 crc kubenswrapper[4774]: I1121 14:06:12.026600 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnpl6" event={"ID":"f7082230-9ac1-49e5-adb3-4a9178f9f796","Type":"ContainerDied","Data":"dfac67ee8bb59cfe001b08af5a6081a1c259a4b6aa029819a8d2cb7ae96f9798"}
Nov 21 14:06:12 crc kubenswrapper[4774]: I1121 14:06:12.029672 4774 generic.go:334] "Generic (PLEG): container finished" podID="fe5f4376-0558-4824-a2d8-119c74a082eb" containerID="f80abea7512fdcb241239e378640112fc083163b50216c2de33a818d946e29b4" exitCode=0
Nov 21 14:06:12 crc kubenswrapper[4774]: I1121 14:06:12.029725 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k42jk" event={"ID":"fe5f4376-0558-4824-a2d8-119c74a082eb","Type":"ContainerDied","Data":"f80abea7512fdcb241239e378640112fc083163b50216c2de33a818d946e29b4"}
Nov 21 14:06:12 crc kubenswrapper[4774]: I1121 14:06:12.653011 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 14:06:12 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld
Nov 21 14:06:12 crc kubenswrapper[4774]: [+]process-running ok
Nov 21 14:06:12 crc kubenswrapper[4774]: healthz check failed
Nov 21 14:06:12 crc kubenswrapper[4774]: I1121 14:06:12.653403 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 14:06:13 crc kubenswrapper[4774]: I1121 14:06:13.050400 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f43d5d1c-936d-4928-a347-657f630b6c6c","Type":"ContainerStarted","Data":"15b6f751ef5445c873725b714ae6d87cad69d1d87ec0e7d42ac1833699505d35"}
Nov 21 14:06:13 crc kubenswrapper[4774]: I1121 14:06:13.085192 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:06:13 crc kubenswrapper[4774]: I1121 14:06:13.096841 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0d294e10-6a0e-4871-871c-01fb8e7ead03-metrics-certs\") pod \"network-metrics-daemon-44mbn\" (UID: \"0d294e10-6a0e-4871-871c-01fb8e7ead03\") " pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:06:13 crc kubenswrapper[4774]: I1121 14:06:13.180506 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-f9llk"
Nov 21 14:06:13 crc kubenswrapper[4774]: I1121 14:06:13.225356 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-44mbn"
Nov 21 14:06:13 crc kubenswrapper[4774]: I1121 14:06:13.663343 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 14:06:13 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld
Nov 21 14:06:13 crc kubenswrapper[4774]: [+]process-running ok
Nov 21 14:06:13 crc kubenswrapper[4774]: healthz check failed
Nov 21 14:06:13 crc kubenswrapper[4774]: I1121 14:06:13.664159 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 14:06:13 crc kubenswrapper[4774]: I1121 14:06:13.753593 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-44mbn"]
Nov 21 14:06:13 crc kubenswrapper[4774]: W1121 14:06:13.775391 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d294e10_6a0e_4871_871c_01fb8e7ead03.slice/crio-ab72b5a24cfc7b546ef02c259a049aa14a1d8b6bf2842c726e7bdcca67eb9bcb WatchSource:0}: Error finding container ab72b5a24cfc7b546ef02c259a049aa14a1d8b6bf2842c726e7bdcca67eb9bcb: Status 404 returned error can't find the container with id ab72b5a24cfc7b546ef02c259a049aa14a1d8b6bf2842c726e7bdcca67eb9bcb
Nov 21 14:06:14 crc kubenswrapper[4774]: I1121 14:06:14.058774 4774 generic.go:334] "Generic (PLEG): container finished" podID="f43d5d1c-936d-4928-a347-657f630b6c6c" containerID="15b6f751ef5445c873725b714ae6d87cad69d1d87ec0e7d42ac1833699505d35" exitCode=0
Nov 21 14:06:14 crc kubenswrapper[4774]: I1121 14:06:14.059009 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f43d5d1c-936d-4928-a347-657f630b6c6c","Type":"ContainerDied","Data":"15b6f751ef5445c873725b714ae6d87cad69d1d87ec0e7d42ac1833699505d35"}
Nov 21 14:06:14 crc kubenswrapper[4774]: I1121 14:06:14.060639 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-44mbn" event={"ID":"0d294e10-6a0e-4871-871c-01fb8e7ead03","Type":"ContainerStarted","Data":"ab72b5a24cfc7b546ef02c259a049aa14a1d8b6bf2842c726e7bdcca67eb9bcb"}
Nov 21 14:06:14 crc kubenswrapper[4774]: I1121 14:06:14.658483 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 14:06:14 crc kubenswrapper[4774]: [-]has-synced failed: reason withheld
Nov 21 14:06:14 crc kubenswrapper[4774]: [+]process-running ok
Nov 21 14:06:14 crc kubenswrapper[4774]: healthz check failed
Nov 21 14:06:14 crc kubenswrapper[4774]: I1121 14:06:14.658565 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 14:06:15 crc kubenswrapper[4774]: I1121 14:06:15.070571 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-44mbn" event={"ID":"0d294e10-6a0e-4871-871c-01fb8e7ead03","Type":"ContainerStarted","Data":"5a26f7b8dd7542d6eb13de14aef52fc142feeab1c8b4091911049b7995d076b2"}
Nov 21 14:06:15 crc kubenswrapper[4774]: I1121 14:06:15.298979 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 14:06:15 crc kubenswrapper[4774]: I1121 14:06:15.426056 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f43d5d1c-936d-4928-a347-657f630b6c6c-kube-api-access\") pod \"f43d5d1c-936d-4928-a347-657f630b6c6c\" (UID: \"f43d5d1c-936d-4928-a347-657f630b6c6c\") "
Nov 21 14:06:15 crc kubenswrapper[4774]: I1121 14:06:15.426217 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f43d5d1c-936d-4928-a347-657f630b6c6c-kubelet-dir\") pod \"f43d5d1c-936d-4928-a347-657f630b6c6c\" (UID: \"f43d5d1c-936d-4928-a347-657f630b6c6c\") "
Nov 21 14:06:15 crc kubenswrapper[4774]: I1121 14:06:15.426319 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f43d5d1c-936d-4928-a347-657f630b6c6c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f43d5d1c-936d-4928-a347-657f630b6c6c" (UID: "f43d5d1c-936d-4928-a347-657f630b6c6c"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 21 14:06:15 crc kubenswrapper[4774]: I1121 14:06:15.426724 4774 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f43d5d1c-936d-4928-a347-657f630b6c6c-kubelet-dir\") on node \"crc\" DevicePath \"\""
Nov 21 14:06:15 crc kubenswrapper[4774]: I1121 14:06:15.448659 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f43d5d1c-936d-4928-a347-657f630b6c6c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f43d5d1c-936d-4928-a347-657f630b6c6c" (UID: "f43d5d1c-936d-4928-a347-657f630b6c6c"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:06:15 crc kubenswrapper[4774]: I1121 14:06:15.528518 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f43d5d1c-936d-4928-a347-657f630b6c6c-kube-api-access\") on node \"crc\" DevicePath \"\""
Nov 21 14:06:15 crc kubenswrapper[4774]: I1121 14:06:15.657354 4774 patch_prober.go:28] interesting pod/router-default-5444994796-rmzf9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 21 14:06:15 crc kubenswrapper[4774]: [+]has-synced ok
Nov 21 14:06:15 crc kubenswrapper[4774]: [+]process-running ok
Nov 21 14:06:15 crc kubenswrapper[4774]: healthz check failed
Nov 21 14:06:15 crc kubenswrapper[4774]: I1121 14:06:15.657430 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-rmzf9" podUID="7c1cea49-d382-4985-b4f3-4ec4a0ec52da" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 21 14:06:16 crc kubenswrapper[4774]: I1121 14:06:16.079924 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f43d5d1c-936d-4928-a347-657f630b6c6c","Type":"ContainerDied","Data":"cf6a5cf986d7c38b04ee3dbcfc6ad77f1933f509d20f4cefa98a5e08798c0a75"}
Nov 21 14:06:16 crc kubenswrapper[4774]: I1121 14:06:16.079971 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf6a5cf986d7c38b04ee3dbcfc6ad77f1933f509d20f4cefa98a5e08798c0a75"
Nov 21 14:06:16 crc kubenswrapper[4774]: I1121 14:06:16.080035 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 21 14:06:16 crc kubenswrapper[4774]: I1121 14:06:16.651763 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:06:16 crc kubenswrapper[4774]: I1121 14:06:16.655101 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-rmzf9"
Nov 21 14:06:17 crc kubenswrapper[4774]: I1121 14:06:17.433561 4774 patch_prober.go:28] interesting pod/console-f9d7485db-w7tjv container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body=
Nov 21 14:06:17 crc kubenswrapper[4774]: I1121 14:06:17.433977 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-w7tjv" podUID="b94e7447-7c8a-4f4e-9507-689f1500605c" containerName="console" probeResult="failure" output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused"
Nov 21 14:06:17 crc kubenswrapper[4774]: I1121 14:06:17.525869 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Nov 21 14:06:17 crc kubenswrapper[4774]: I1121 14:06:17.525940 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Nov 21 14:06:17 crc kubenswrapper[4774]: I1121 14:06:17.525949 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Nov 21 14:06:17 crc kubenswrapper[4774]: I1121 14:06:17.526033 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Nov 21 14:06:26 crc kubenswrapper[4774]: I1121 14:06:26.276795 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:06:27 crc kubenswrapper[4774]: I1121 14:06:27.437066 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-w7tjv"
Nov 21 14:06:27 crc kubenswrapper[4774]: I1121 14:06:27.440523 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-w7tjv"
Nov 21 14:06:27 crc kubenswrapper[4774]: I1121 14:06:27.525800 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Nov 21 14:06:27 crc kubenswrapper[4774]: I1121 14:06:27.525877 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Nov 21 14:06:27 crc kubenswrapper[4774]: I1121 14:06:27.525968 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Nov 21 14:06:27 crc kubenswrapper[4774]: I1121 14:06:27.526012 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Nov 21 14:06:27 crc kubenswrapper[4774]: I1121 14:06:27.526045 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-4rc7m"
Nov 21 14:06:27 crc kubenswrapper[4774]: I1121 14:06:27.526569 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body=
Nov 21 14:06:27 crc kubenswrapper[4774]: I1121 14:06:27.526624 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused"
Nov 21 14:06:27 crc kubenswrapper[4774]: I1121 14:06:27.526664 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"31628deae1a7c50cc0974efd3d2d6322ddeafa196c32e23563ff74748de1877b"} pod="openshift-console/downloads-7954f5f757-4rc7m" containerMessage="Container download-server failed liveness probe, will be restarted"
Nov 21 14:06:27 crc kubenswrapper[4774]: I1121 14:06:27.526767 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" containerID="cri-o://31628deae1a7c50cc0974efd3d2d6322ddeafa196c32e23563ff74748de1877b" gracePeriod=2
Nov 21 14:06:29 crc kubenswrapper[4774]: I1121 14:06:29.174796 4774 generic.go:334] "Generic (PLEG): container finished" podID="1c533952-b089-4c49-b4dc-a969c08022b9" containerID="31628deae1a7c50cc0974efd3d2d6322ddeafa196c32e23563ff74748de1877b" exitCode=0
Nov 21 14:06:29 crc kubenswrapper[4774]: I1121 14:06:29.174872 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4rc7m" event={"ID":"1c533952-b089-4c49-b4dc-a969c08022b9","Type":"ContainerDied","Data":"31628deae1a7c50cc0974efd3d2d6322ddeafa196c32e23563ff74748de1877b"}
Nov 21 14:06:29 crc kubenswrapper[4774]: I1121 14:06:29.601295 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
start-of-body= Nov 21 14:06:29 crc kubenswrapper[4774]: I1121 14:06:29.601365 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:06:37 crc kubenswrapper[4774]: I1121 14:06:37.532195 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 21 14:06:37 crc kubenswrapper[4774]: I1121 14:06:37.532770 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 21 14:06:38 crc kubenswrapper[4774]: I1121 14:06:38.018941 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-ps5m6" Nov 21 14:06:39 crc kubenswrapper[4774]: I1121 14:06:39.121043 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 21 14:06:44 crc kubenswrapper[4774]: E1121 14:06:44.549594 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Nov 21 14:06:44 crc kubenswrapper[4774]: E1121 14:06:44.550202 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fhzrk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
certified-operators-xt4q5_openshift-marketplace(e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 21 14:06:44 crc kubenswrapper[4774]: E1121 14:06:44.551786 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-xt4q5" podUID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" Nov 21 14:06:46 crc kubenswrapper[4774]: E1121 14:06:46.160850 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-xt4q5" podUID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" Nov 21 14:06:46 crc kubenswrapper[4774]: E1121 14:06:46.212345 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 21 14:06:46 crc kubenswrapper[4774]: E1121 14:06:46.212787 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f4x92,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-m7lsv_openshift-marketplace(43d9f80e-f6e4-4a6d-9454-a001ac75b5d4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 21 14:06:46 crc kubenswrapper[4774]: E1121 14:06:46.214093 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context 
canceled\"" pod="openshift-marketplace/redhat-marketplace-m7lsv" podUID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" Nov 21 14:06:46 crc kubenswrapper[4774]: E1121 14:06:46.276498 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Nov 21 14:06:46 crc kubenswrapper[4774]: E1121 14:06:46.276686 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t5x25,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-lrb9s_openshift-marketplace(47f45e51-ad6b-4fb3-8777-92d9879a72cb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 21 14:06:46 crc kubenswrapper[4774]: E1121 14:06:46.278135 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-lrb9s" podUID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" Nov 21 14:06:47 crc kubenswrapper[4774]: I1121 14:06:47.525959 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 21 14:06:47 crc kubenswrapper[4774]: I1121 14:06:47.526305 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 21 14:06:47 crc kubenswrapper[4774]: E1121 14:06:47.713898 4774 log.go:32] "PullImage from image service failed" 
err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 21 14:06:47 crc kubenswrapper[4774]: E1121 14:06:47.714108 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8kh44,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-q922n_openshift-marketplace(8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 21 14:06:47 crc kubenswrapper[4774]: E1121 14:06:47.715538 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-q922n" podUID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" Nov 21 14:06:51 crc kubenswrapper[4774]: E1121 14:06:51.547715 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-lrb9s" podUID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" Nov 21 14:06:51 crc kubenswrapper[4774]: E1121 14:06:51.547910 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-q922n" podUID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" Nov 21 14:06:51 crc kubenswrapper[4774]: E1121 14:06:51.548042 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-m7lsv" podUID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" Nov 21 14:06:57 crc kubenswrapper[4774]: I1121 14:06:57.526110 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 21 14:06:57 crc kubenswrapper[4774]: I1121 14:06:57.526551 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 21 14:06:57 crc kubenswrapper[4774]: E1121 14:06:57.800513 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 21 14:06:57 crc kubenswrapper[4774]: E1121 14:06:57.800974 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b9kbt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-h7j28_openshift-marketplace(fcb48dc1-9e29-4b50-9687-a9ad5d1ff522): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 21 14:06:57 crc kubenswrapper[4774]: E1121 14:06:57.802172 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-h7j28" podUID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" Nov 21 14:06:59 crc kubenswrapper[4774]: I1121 
14:06:59.600482 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:06:59 crc kubenswrapper[4774]: I1121 14:06:59.600597 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:06:59 crc kubenswrapper[4774]: I1121 14:06:59.600683 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:06:59 crc kubenswrapper[4774]: I1121 14:06:59.601535 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 14:06:59 crc kubenswrapper[4774]: I1121 14:06:59.601608 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16" gracePeriod=600 Nov 21 14:06:59 crc kubenswrapper[4774]: E1121 14:06:59.715362 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 21 14:06:59 crc kubenswrapper[4774]: E1121 14:06:59.715609 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-svs9t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-zd7lj_openshift-marketplace(a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 21 14:06:59 crc kubenswrapper[4774]: E1121 14:06:59.717230 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-zd7lj" podUID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" Nov 21 14:07:00 crc kubenswrapper[4774]: I1121 14:07:00.402435 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16" exitCode=0 Nov 21 14:07:00 crc kubenswrapper[4774]: I1121 14:07:00.402478 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16"} Nov 21 14:07:06 crc kubenswrapper[4774]: E1121 14:07:06.110457 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-h7j28" podUID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" Nov 21 14:07:06 crc kubenswrapper[4774]: E1121 14:07:06.111181 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-zd7lj" podUID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" Nov 21 14:07:06 crc kubenswrapper[4774]: E1121 14:07:06.150641 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying 
config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 21 14:07:06 crc kubenswrapper[4774]: E1121 14:07:06.151009 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z8g6z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-tnpl6_openshift-marketplace(f7082230-9ac1-49e5-adb3-4a9178f9f796): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 21 14:07:06 crc kubenswrapper[4774]: E1121 14:07:06.152378 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-tnpl6" podUID="f7082230-9ac1-49e5-adb3-4a9178f9f796" Nov 21 14:07:06 crc kubenswrapper[4774]: E1121 14:07:06.176882 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 21 14:07:06 crc kubenswrapper[4774]: E1121 14:07:06.177216 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6226j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-k42jk_openshift-marketplace(fe5f4376-0558-4824-a2d8-119c74a082eb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 21 14:07:06 crc kubenswrapper[4774]: E1121 14:07:06.178721 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-k42jk" podUID="fe5f4376-0558-4824-a2d8-119c74a082eb" Nov 21 14:07:07 crc kubenswrapper[4774]: I1121 14:07:07.525367 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 21 14:07:07 crc kubenswrapper[4774]: I1121 14:07:07.525807 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 21 14:07:09 crc kubenswrapper[4774]: I1121 14:07:09.459811 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-4rc7m" event={"ID":"1c533952-b089-4c49-b4dc-a969c08022b9","Type":"ContainerStarted","Data":"762359c21b60623ecbb32b6ea3fdfb663f8f15879d1bccda9d6047d9aa019017"} Nov 21 14:07:12 crc kubenswrapper[4774]: I1121 14:07:12.515201 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-44mbn" event={"ID":"0d294e10-6a0e-4871-871c-01fb8e7ead03","Type":"ContainerStarted","Data":"076ff0807b9585d7c454df68a97fbb295d6c7fe49fac2c0d9b7492dd5e82298b"} Nov 21 14:07:13 crc kubenswrapper[4774]: I1121 14:07:13.522132 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" 
event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"2595c16f9eff7964aa17f69ce9e0ff010fda0dfd486f4645209c9e40e8a6db69"} Nov 21 14:07:13 crc kubenswrapper[4774]: I1121 14:07:13.522554 4774 patch_prober.go:28] interesting pod/downloads-7954f5f757-4rc7m container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Nov 21 14:07:13 crc kubenswrapper[4774]: I1121 14:07:13.522877 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-4rc7m" podUID="1c533952-b089-4c49-b4dc-a969c08022b9" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Nov 21 14:07:13 crc kubenswrapper[4774]: I1121 14:07:13.555854 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-44mbn" podStartSLOduration=203.555837441 podStartE2EDuration="3m23.555837441s" podCreationTimestamp="2025-11-21 14:03:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:07:13.554803972 +0000 UTC m=+224.207003251" watchObservedRunningTime="2025-11-21 14:07:13.555837441 +0000 UTC m=+224.208036710" Nov 21 14:07:17 crc kubenswrapper[4774]: I1121 14:07:17.525543 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-4rc7m" Nov 21 14:07:17 crc kubenswrapper[4774]: I1121 14:07:17.530989 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-4rc7m" Nov 21 14:07:34 crc kubenswrapper[4774]: I1121 14:07:34.668981 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q922n" event={"ID":"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2","Type":"ContainerStarted","Data":"8e8a3bf4e1c2708bd6ba12036a1509b5c2cb0351d1ea3dde246929dbab82a799"} Nov 21 14:07:34 crc kubenswrapper[4774]: I1121 14:07:34.673040 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xt4q5" event={"ID":"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411","Type":"ContainerStarted","Data":"e8602aae73c0dca4e63af61b3163c1316b82437931e9f33e1a62cc0d0ff6c938"} Nov 21 14:07:34 crc kubenswrapper[4774]: I1121 14:07:34.678009 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m7lsv" event={"ID":"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4","Type":"ContainerStarted","Data":"1328baca1a1850735fc6cca257c7e3767e376efb722977d7de93fd1ef26609fe"} Nov 21 14:07:34 crc kubenswrapper[4774]: I1121 14:07:34.681293 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lrb9s" event={"ID":"47f45e51-ad6b-4fb3-8777-92d9879a72cb","Type":"ContainerStarted","Data":"f84647e9d6fd66f7a670e3fab7aff43ac6fb3878ba3c13da9887ca8f64221638"} Nov 21 14:07:35 crc kubenswrapper[4774]: I1121 14:07:35.688682 4774 generic.go:334] "Generic (PLEG): container finished" podID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" containerID="8e8a3bf4e1c2708bd6ba12036a1509b5c2cb0351d1ea3dde246929dbab82a799" exitCode=0 Nov 21 14:07:35 crc kubenswrapper[4774]: I1121 14:07:35.689333 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q922n" 
event={"ID":"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2","Type":"ContainerDied","Data":"8e8a3bf4e1c2708bd6ba12036a1509b5c2cb0351d1ea3dde246929dbab82a799"} Nov 21 14:07:35 crc kubenswrapper[4774]: I1121 14:07:35.693589 4774 generic.go:334] "Generic (PLEG): container finished" podID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" containerID="e8602aae73c0dca4e63af61b3163c1316b82437931e9f33e1a62cc0d0ff6c938" exitCode=0 Nov 21 14:07:35 crc kubenswrapper[4774]: I1121 14:07:35.693786 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xt4q5" event={"ID":"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411","Type":"ContainerDied","Data":"e8602aae73c0dca4e63af61b3163c1316b82437931e9f33e1a62cc0d0ff6c938"} Nov 21 14:07:35 crc kubenswrapper[4774]: I1121 14:07:35.696628 4774 generic.go:334] "Generic (PLEG): container finished" podID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" containerID="1328baca1a1850735fc6cca257c7e3767e376efb722977d7de93fd1ef26609fe" exitCode=0 Nov 21 14:07:35 crc kubenswrapper[4774]: I1121 14:07:35.696709 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m7lsv" event={"ID":"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4","Type":"ContainerDied","Data":"1328baca1a1850735fc6cca257c7e3767e376efb722977d7de93fd1ef26609fe"} Nov 21 14:07:35 crc kubenswrapper[4774]: I1121 14:07:35.702167 4774 generic.go:334] "Generic (PLEG): container finished" podID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" containerID="f84647e9d6fd66f7a670e3fab7aff43ac6fb3878ba3c13da9887ca8f64221638" exitCode=0 Nov 21 14:07:35 crc kubenswrapper[4774]: I1121 14:07:35.702278 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lrb9s" event={"ID":"47f45e51-ad6b-4fb3-8777-92d9879a72cb","Type":"ContainerDied","Data":"f84647e9d6fd66f7a670e3fab7aff43ac6fb3878ba3c13da9887ca8f64221638"} Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.759945 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xt4q5" event={"ID":"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411","Type":"ContainerStarted","Data":"55cade1890016d5a13468a52cfd59adb69ccfd17b8649ca6b12495f3806a9879"} Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.761993 4774 generic.go:334] "Generic (PLEG): container finished" podID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" containerID="5738fff86a8f4598c438f62df944eab376c453df37e321feae6a5754337e46c8" exitCode=0 Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.762055 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zd7lj" event={"ID":"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a","Type":"ContainerDied","Data":"5738fff86a8f4598c438f62df944eab376c453df37e321feae6a5754337e46c8"} Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.765370 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k42jk" event={"ID":"fe5f4376-0558-4824-a2d8-119c74a082eb","Type":"ContainerStarted","Data":"323138e463303dd707163620b3580837c8be3805ba37a51422523f5d102ce3b5"} Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.769076 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m7lsv" event={"ID":"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4","Type":"ContainerStarted","Data":"57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7"} Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.772020 4774 generic.go:334] "Generic (PLEG): container finished" 
podID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" containerID="deee6091927812ec21530e1900da4034d0ff45b736f4f463ccc60249d0511ef0" exitCode=0 Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.772088 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h7j28" event={"ID":"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522","Type":"ContainerDied","Data":"deee6091927812ec21530e1900da4034d0ff45b736f4f463ccc60249d0511ef0"} Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.777387 4774 generic.go:334] "Generic (PLEG): container finished" podID="f7082230-9ac1-49e5-adb3-4a9178f9f796" containerID="2756624a026475f37f0d4979b5b362c2599010eb73fc3118d80fdafd75104e9e" exitCode=0 Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.777449 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnpl6" event={"ID":"f7082230-9ac1-49e5-adb3-4a9178f9f796","Type":"ContainerDied","Data":"2756624a026475f37f0d4979b5b362c2599010eb73fc3118d80fdafd75104e9e"} Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.784395 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lrb9s" event={"ID":"47f45e51-ad6b-4fb3-8777-92d9879a72cb","Type":"ContainerStarted","Data":"cef375eab477fc7f20be1959022fec29fbeb53d2b7abc0fa6b121a6c154995e5"} Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.786726 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q922n" event={"ID":"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2","Type":"ContainerStarted","Data":"7a262c4217968154707731aedcb93e0f3d47d15d981eedcd7173aecfa99d38c8"} Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.801899 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xt4q5" podStartSLOduration=2.96334296 podStartE2EDuration="1m39.801881395s" podCreationTimestamp="2025-11-21 14:06:05 +0000 UTC" firstStartedPulling="2025-11-21 14:06:06.815691295 +0000 UTC m=+157.467890554" lastFinishedPulling="2025-11-21 14:07:43.65422973 +0000 UTC m=+254.306428989" observedRunningTime="2025-11-21 14:07:44.799055925 +0000 UTC m=+255.451255184" watchObservedRunningTime="2025-11-21 14:07:44.801881395 +0000 UTC m=+255.454080654" Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.876771 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lrb9s" podStartSLOduration=3.918702555 podStartE2EDuration="1m39.876751887s" podCreationTimestamp="2025-11-21 14:06:05 +0000 UTC" firstStartedPulling="2025-11-21 14:06:07.939172505 +0000 UTC m=+158.591371764" lastFinishedPulling="2025-11-21 14:07:43.897221837 +0000 UTC m=+254.549421096" observedRunningTime="2025-11-21 14:07:44.872957839 +0000 UTC m=+255.525157118" watchObservedRunningTime="2025-11-21 14:07:44.876751887 +0000 UTC m=+255.528951146" Nov 21 14:07:44 crc kubenswrapper[4774]: I1121 14:07:44.953230 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-m7lsv" podStartSLOduration=3.015466705 podStartE2EDuration="1m37.953210794s" podCreationTimestamp="2025-11-21 14:06:07 +0000 UTC" firstStartedPulling="2025-11-21 14:06:08.948949499 +0000 UTC m=+159.601148758" lastFinishedPulling="2025-11-21 14:07:43.886693588 +0000 UTC m=+254.538892847" observedRunningTime="2025-11-21 14:07:44.949538539 +0000 UTC m=+255.601737798" watchObservedRunningTime="2025-11-21 14:07:44.953210794 +0000 
UTC m=+255.605410053" Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.000390 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q922n" podStartSLOduration=3.005418141 podStartE2EDuration="1m39.00036853s" podCreationTimestamp="2025-11-21 14:06:06 +0000 UTC" firstStartedPulling="2025-11-21 14:06:07.904750088 +0000 UTC m=+158.556949347" lastFinishedPulling="2025-11-21 14:07:43.899700477 +0000 UTC m=+254.551899736" observedRunningTime="2025-11-21 14:07:44.98272879 +0000 UTC m=+255.634928049" watchObservedRunningTime="2025-11-21 14:07:45.00036853 +0000 UTC m=+255.652567789" Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.434130 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.434503 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.805095 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zd7lj" event={"ID":"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a","Type":"ContainerStarted","Data":"2acc656be78909f5a7da9db73fbb6d9f4b013d4f516f60ce56ab44f4651f1449"} Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.806851 4774 generic.go:334] "Generic (PLEG): container finished" podID="fe5f4376-0558-4824-a2d8-119c74a082eb" containerID="323138e463303dd707163620b3580837c8be3805ba37a51422523f5d102ce3b5" exitCode=0 Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.806919 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k42jk" event={"ID":"fe5f4376-0558-4824-a2d8-119c74a082eb","Type":"ContainerDied","Data":"323138e463303dd707163620b3580837c8be3805ba37a51422523f5d102ce3b5"} Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.813254 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h7j28" event={"ID":"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522","Type":"ContainerStarted","Data":"1a1c0c730cf0f89b3f2fcd73c45ec35321071bcce020a21db734f352a3817d3b"} Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.816340 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnpl6" event={"ID":"f7082230-9ac1-49e5-adb3-4a9178f9f796","Type":"ContainerStarted","Data":"1802a2d7d3c1c5c751ad5d4e8e3d4c9925e96290b75e282d38ca44baaab09346"} Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.859530 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zd7lj" podStartSLOduration=3.557292864 podStartE2EDuration="1m41.859510909s" podCreationTimestamp="2025-11-21 14:06:04 +0000 UTC" firstStartedPulling="2025-11-21 14:06:06.840632303 +0000 UTC m=+157.492831562" lastFinishedPulling="2025-11-21 14:07:45.142850348 +0000 UTC m=+255.795049607" observedRunningTime="2025-11-21 14:07:45.858053448 +0000 UTC m=+256.510252717" watchObservedRunningTime="2025-11-21 14:07:45.859510909 +0000 UTC m=+256.511710178" Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.901885 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.902322 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.915284 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tnpl6" podStartSLOduration=5.7462283119999995 podStartE2EDuration="1m37.915258589s" podCreationTimestamp="2025-11-21 14:06:08 +0000 UTC" firstStartedPulling="2025-11-21 14:06:13.056205976 +0000 UTC m=+163.708405235" lastFinishedPulling="2025-11-21 14:07:45.225236253 +0000 UTC m=+255.877435512" observedRunningTime="2025-11-21 14:07:45.887295087 +0000 UTC m=+256.539494346" watchObservedRunningTime="2025-11-21 14:07:45.915258589 +0000 UTC m=+256.567457848" Nov 21 14:07:45 crc kubenswrapper[4774]: I1121 14:07:45.916957 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-h7j28" podStartSLOduration=3.618395605 podStartE2EDuration="1m40.916951157s" podCreationTimestamp="2025-11-21 14:06:05 +0000 UTC" firstStartedPulling="2025-11-21 14:06:07.932852736 +0000 UTC m=+158.585051995" lastFinishedPulling="2025-11-21 14:07:45.231408288 +0000 UTC m=+255.883607547" observedRunningTime="2025-11-21 14:07:45.915743203 +0000 UTC m=+256.567942472" watchObservedRunningTime="2025-11-21 14:07:45.916951157 +0000 UTC m=+256.569150416" Nov 21 14:07:46 crc kubenswrapper[4774]: I1121 14:07:46.620882 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-xt4q5" podUID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" containerName="registry-server" probeResult="failure" output=< Nov 21 14:07:46 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 14:07:46 crc kubenswrapper[4774]: > Nov 21 14:07:46 crc kubenswrapper[4774]: I1121 14:07:46.945332 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-lrb9s" podUID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" containerName="registry-server" probeResult="failure" output=< Nov 21 14:07:46 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 14:07:46 crc kubenswrapper[4774]: > Nov 21 14:07:47 crc kubenswrapper[4774]: I1121 14:07:47.336911 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q922n" Nov 21 14:07:47 crc kubenswrapper[4774]: I1121 14:07:47.337231 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q922n" Nov 21 14:07:47 crc kubenswrapper[4774]: I1121 14:07:47.399007 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q922n" Nov 21 14:07:47 crc kubenswrapper[4774]: I1121 14:07:47.614671 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-m7lsv" Nov 21 14:07:47 crc kubenswrapper[4774]: I1121 14:07:47.614803 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-m7lsv" Nov 21 14:07:47 crc kubenswrapper[4774]: I1121 14:07:47.663362 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-m7lsv" Nov 21 14:07:47 crc kubenswrapper[4774]: I1121 14:07:47.827998 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k42jk" 
event={"ID":"fe5f4376-0558-4824-a2d8-119c74a082eb","Type":"ContainerStarted","Data":"1d8adb5f5c07861da2ee43f3906018923581faf4a854b39d907f311e56c5d68a"} Nov 21 14:07:47 crc kubenswrapper[4774]: I1121 14:07:47.846439 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-k42jk" podStartSLOduration=5.99036607 podStartE2EDuration="1m39.84641664s" podCreationTimestamp="2025-11-21 14:06:08 +0000 UTC" firstStartedPulling="2025-11-21 14:06:13.052710197 +0000 UTC m=+163.704909456" lastFinishedPulling="2025-11-21 14:07:46.908760777 +0000 UTC m=+257.560960026" observedRunningTime="2025-11-21 14:07:47.846085741 +0000 UTC m=+258.498285010" watchObservedRunningTime="2025-11-21 14:07:47.84641664 +0000 UTC m=+258.498615909" Nov 21 14:07:48 crc kubenswrapper[4774]: I1121 14:07:48.631361 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-k42jk" Nov 21 14:07:48 crc kubenswrapper[4774]: I1121 14:07:48.631663 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-k42jk" Nov 21 14:07:49 crc kubenswrapper[4774]: I1121 14:07:49.143202 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tnpl6" Nov 21 14:07:49 crc kubenswrapper[4774]: I1121 14:07:49.143258 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tnpl6" Nov 21 14:07:49 crc kubenswrapper[4774]: I1121 14:07:49.677910 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-k42jk" podUID="fe5f4376-0558-4824-a2d8-119c74a082eb" containerName="registry-server" probeResult="failure" output=< Nov 21 14:07:49 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 14:07:49 crc kubenswrapper[4774]: > Nov 21 14:07:50 crc kubenswrapper[4774]: I1121 14:07:50.194376 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tnpl6" podUID="f7082230-9ac1-49e5-adb3-4a9178f9f796" containerName="registry-server" probeResult="failure" output=< Nov 21 14:07:50 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 14:07:50 crc kubenswrapper[4774]: > Nov 21 14:07:55 crc kubenswrapper[4774]: I1121 14:07:55.255424 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:07:55 crc kubenswrapper[4774]: I1121 14:07:55.255887 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:07:55 crc kubenswrapper[4774]: I1121 14:07:55.300412 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:07:55 crc kubenswrapper[4774]: I1121 14:07:55.473200 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:07:55 crc kubenswrapper[4774]: I1121 14:07:55.514481 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:07:55 crc kubenswrapper[4774]: I1121 14:07:55.643498 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:07:55 crc kubenswrapper[4774]: I1121 
14:07:55.643561 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:07:55 crc kubenswrapper[4774]: I1121 14:07:55.682432 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:07:55 crc kubenswrapper[4774]: I1121 14:07:55.913637 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:07:55 crc kubenswrapper[4774]: I1121 14:07:55.920112 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:07:55 crc kubenswrapper[4774]: I1121 14:07:55.956471 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:07:55 crc kubenswrapper[4774]: I1121 14:07:55.995378 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:07:57 crc kubenswrapper[4774]: I1121 14:07:57.331457 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lrb9s"] Nov 21 14:07:57 crc kubenswrapper[4774]: I1121 14:07:57.380264 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q922n" Nov 21 14:07:57 crc kubenswrapper[4774]: I1121 14:07:57.658591 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-m7lsv" Nov 21 14:07:57 crc kubenswrapper[4774]: I1121 14:07:57.882590 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lrb9s" podUID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" containerName="registry-server" containerID="cri-o://cef375eab477fc7f20be1959022fec29fbeb53d2b7abc0fa6b121a6c154995e5" gracePeriod=2 Nov 21 14:07:57 crc kubenswrapper[4774]: I1121 14:07:57.931590 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h7j28"] Nov 21 14:07:57 crc kubenswrapper[4774]: I1121 14:07:57.931852 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-h7j28" podUID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" containerName="registry-server" containerID="cri-o://1a1c0c730cf0f89b3f2fcd73c45ec35321071bcce020a21db734f352a3817d3b" gracePeriod=2 Nov 21 14:07:58 crc kubenswrapper[4774]: I1121 14:07:58.686237 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-k42jk" Nov 21 14:07:58 crc kubenswrapper[4774]: I1121 14:07:58.726151 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-k42jk" Nov 21 14:07:58 crc kubenswrapper[4774]: I1121 14:07:58.892333 4774 generic.go:334] "Generic (PLEG): container finished" podID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" containerID="cef375eab477fc7f20be1959022fec29fbeb53d2b7abc0fa6b121a6c154995e5" exitCode=0 Nov 21 14:07:58 crc kubenswrapper[4774]: I1121 14:07:58.892386 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lrb9s" event={"ID":"47f45e51-ad6b-4fb3-8777-92d9879a72cb","Type":"ContainerDied","Data":"cef375eab477fc7f20be1959022fec29fbeb53d2b7abc0fa6b121a6c154995e5"} Nov 21 14:07:59 crc 
kubenswrapper[4774]: I1121 14:07:59.183077 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tnpl6" Nov 21 14:07:59 crc kubenswrapper[4774]: I1121 14:07:59.224516 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tnpl6" Nov 21 14:07:59 crc kubenswrapper[4774]: I1121 14:07:59.908023 4774 generic.go:334] "Generic (PLEG): container finished" podID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" containerID="1a1c0c730cf0f89b3f2fcd73c45ec35321071bcce020a21db734f352a3817d3b" exitCode=0 Nov 21 14:07:59 crc kubenswrapper[4774]: I1121 14:07:59.908207 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h7j28" event={"ID":"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522","Type":"ContainerDied","Data":"1a1c0c730cf0f89b3f2fcd73c45ec35321071bcce020a21db734f352a3817d3b"} Nov 21 14:07:59 crc kubenswrapper[4774]: I1121 14:07:59.984735 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.145401 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47f45e51-ad6b-4fb3-8777-92d9879a72cb-catalog-content\") pod \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\" (UID: \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\") " Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.145569 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47f45e51-ad6b-4fb3-8777-92d9879a72cb-utilities\") pod \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\" (UID: \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\") " Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.145598 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5x25\" (UniqueName: \"kubernetes.io/projected/47f45e51-ad6b-4fb3-8777-92d9879a72cb-kube-api-access-t5x25\") pod \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\" (UID: \"47f45e51-ad6b-4fb3-8777-92d9879a72cb\") " Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.147695 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47f45e51-ad6b-4fb3-8777-92d9879a72cb-utilities" (OuterVolumeSpecName: "utilities") pod "47f45e51-ad6b-4fb3-8777-92d9879a72cb" (UID: "47f45e51-ad6b-4fb3-8777-92d9879a72cb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.169060 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47f45e51-ad6b-4fb3-8777-92d9879a72cb-kube-api-access-t5x25" (OuterVolumeSpecName: "kube-api-access-t5x25") pod "47f45e51-ad6b-4fb3-8777-92d9879a72cb" (UID: "47f45e51-ad6b-4fb3-8777-92d9879a72cb"). InnerVolumeSpecName "kube-api-access-t5x25". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.247674 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47f45e51-ad6b-4fb3-8777-92d9879a72cb-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.247707 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5x25\" (UniqueName: \"kubernetes.io/projected/47f45e51-ad6b-4fb3-8777-92d9879a72cb-kube-api-access-t5x25\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.257154 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47f45e51-ad6b-4fb3-8777-92d9879a72cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47f45e51-ad6b-4fb3-8777-92d9879a72cb" (UID: "47f45e51-ad6b-4fb3-8777-92d9879a72cb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.286340 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.335044 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m7lsv"] Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.335269 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-m7lsv" podUID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" containerName="registry-server" containerID="cri-o://57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7" gracePeriod=2 Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.348440 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47f45e51-ad6b-4fb3-8777-92d9879a72cb-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.449938 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-catalog-content\") pod \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\" (UID: \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\") " Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.449994 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9kbt\" (UniqueName: \"kubernetes.io/projected/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-kube-api-access-b9kbt\") pod \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\" (UID: \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\") " Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.450120 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-utilities\") pod \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\" (UID: \"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522\") " Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.451282 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-utilities" (OuterVolumeSpecName: "utilities") pod "fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" (UID: "fcb48dc1-9e29-4b50-9687-a9ad5d1ff522"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.456017 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-kube-api-access-b9kbt" (OuterVolumeSpecName: "kube-api-access-b9kbt") pod "fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" (UID: "fcb48dc1-9e29-4b50-9687-a9ad5d1ff522"). InnerVolumeSpecName "kube-api-access-b9kbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.504140 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" (UID: "fcb48dc1-9e29-4b50-9687-a9ad5d1ff522"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.552145 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.552195 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.552210 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9kbt\" (UniqueName: \"kubernetes.io/projected/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522-kube-api-access-b9kbt\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.916798 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lrb9s" event={"ID":"47f45e51-ad6b-4fb3-8777-92d9879a72cb","Type":"ContainerDied","Data":"a601e538acdbdb997cf29521c48401cf774aaeded8bcad2404ca50c2c2eeda32"} Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.916886 4774 scope.go:117] "RemoveContainer" containerID="cef375eab477fc7f20be1959022fec29fbeb53d2b7abc0fa6b121a6c154995e5" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.917037 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lrb9s" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.921660 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-h7j28" event={"ID":"fcb48dc1-9e29-4b50-9687-a9ad5d1ff522","Type":"ContainerDied","Data":"1e7d3601aa68e799ab4f4d951b44d48e4f862d708d5c3fd994032b3e1bb23f6c"} Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.921809 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-h7j28" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.939110 4774 scope.go:117] "RemoveContainer" containerID="f84647e9d6fd66f7a670e3fab7aff43ac6fb3878ba3c13da9887ca8f64221638" Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.957227 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-h7j28"] Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.963228 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-h7j28"] Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.974843 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lrb9s"] Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.979102 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lrb9s"] Nov 21 14:08:00 crc kubenswrapper[4774]: I1121 14:08:00.988295 4774 scope.go:117] "RemoveContainer" containerID="4373ec705b3dc1fb67c8df8856024caff9942873e127b5ad8b8ee1cba61414b4" Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.003328 4774 scope.go:117] "RemoveContainer" containerID="1a1c0c730cf0f89b3f2fcd73c45ec35321071bcce020a21db734f352a3817d3b" Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.022289 4774 scope.go:117] "RemoveContainer" containerID="deee6091927812ec21530e1900da4034d0ff45b736f4f463ccc60249d0511ef0" Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.039120 4774 scope.go:117] "RemoveContainer" containerID="48f350b9cc835725bad877b57431c6223f163c3e7e579648842b4043d38eac68" Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.896767 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m7lsv" Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.928205 4774 generic.go:334] "Generic (PLEG): container finished" podID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" containerID="57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7" exitCode=0 Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.928262 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-m7lsv" Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.928313 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m7lsv" event={"ID":"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4","Type":"ContainerDied","Data":"57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7"} Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.928356 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-m7lsv" event={"ID":"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4","Type":"ContainerDied","Data":"86ac864af53183f329aeb1fa95a14370b73ea7e146fe2b16df4b35685af95902"} Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.928381 4774 scope.go:117] "RemoveContainer" containerID="57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7" Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.949120 4774 scope.go:117] "RemoveContainer" containerID="1328baca1a1850735fc6cca257c7e3767e376efb722977d7de93fd1ef26609fe" Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.970016 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-catalog-content\") pod \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\" (UID: \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\") " Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.970451 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-utilities\") pod \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\" (UID: \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\") " Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.970510 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4x92\" (UniqueName: \"kubernetes.io/projected/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-kube-api-access-f4x92\") pod \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\" (UID: \"43d9f80e-f6e4-4a6d-9454-a001ac75b5d4\") " Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.971360 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-utilities" (OuterVolumeSpecName: "utilities") pod "43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" (UID: "43d9f80e-f6e4-4a6d-9454-a001ac75b5d4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.973915 4774 scope.go:117] "RemoveContainer" containerID="0cf336bad906408f05ba69b5dd0d0a3cd0a5083f0d2a879fa0201ef853cb2bc3" Nov 21 14:08:01 crc kubenswrapper[4774]: I1121 14:08:01.983989 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-kube-api-access-f4x92" (OuterVolumeSpecName: "kube-api-access-f4x92") pod "43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" (UID: "43d9f80e-f6e4-4a6d-9454-a001ac75b5d4"). InnerVolumeSpecName "kube-api-access-f4x92". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.013221 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" (UID: "43d9f80e-f6e4-4a6d-9454-a001ac75b5d4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.028989 4774 scope.go:117] "RemoveContainer" containerID="57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7" Nov 21 14:08:02 crc kubenswrapper[4774]: E1121 14:08:02.029583 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7\": container with ID starting with 57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7 not found: ID does not exist" containerID="57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.029652 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7"} err="failed to get container status \"57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7\": rpc error: code = NotFound desc = could not find container \"57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7\": container with ID starting with 57d1e41b45a16f91091e939fc0e922575ed6ceb8537e221159f9b3f7e0d9e3d7 not found: ID does not exist" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.029698 4774 scope.go:117] "RemoveContainer" containerID="1328baca1a1850735fc6cca257c7e3767e376efb722977d7de93fd1ef26609fe" Nov 21 14:08:02 crc kubenswrapper[4774]: E1121 14:08:02.030069 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1328baca1a1850735fc6cca257c7e3767e376efb722977d7de93fd1ef26609fe\": container with ID starting with 1328baca1a1850735fc6cca257c7e3767e376efb722977d7de93fd1ef26609fe not found: ID does not exist" containerID="1328baca1a1850735fc6cca257c7e3767e376efb722977d7de93fd1ef26609fe" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.030100 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1328baca1a1850735fc6cca257c7e3767e376efb722977d7de93fd1ef26609fe"} err="failed to get container status \"1328baca1a1850735fc6cca257c7e3767e376efb722977d7de93fd1ef26609fe\": rpc error: code = NotFound desc = could not find container \"1328baca1a1850735fc6cca257c7e3767e376efb722977d7de93fd1ef26609fe\": container with ID starting with 1328baca1a1850735fc6cca257c7e3767e376efb722977d7de93fd1ef26609fe not found: ID does not exist" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.030118 4774 scope.go:117] "RemoveContainer" containerID="0cf336bad906408f05ba69b5dd0d0a3cd0a5083f0d2a879fa0201ef853cb2bc3" Nov 21 14:08:02 crc kubenswrapper[4774]: E1121 14:08:02.030448 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cf336bad906408f05ba69b5dd0d0a3cd0a5083f0d2a879fa0201ef853cb2bc3\": container with ID starting with 0cf336bad906408f05ba69b5dd0d0a3cd0a5083f0d2a879fa0201ef853cb2bc3 not found: ID does not exist" 
containerID="0cf336bad906408f05ba69b5dd0d0a3cd0a5083f0d2a879fa0201ef853cb2bc3" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.030496 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cf336bad906408f05ba69b5dd0d0a3cd0a5083f0d2a879fa0201ef853cb2bc3"} err="failed to get container status \"0cf336bad906408f05ba69b5dd0d0a3cd0a5083f0d2a879fa0201ef853cb2bc3\": rpc error: code = NotFound desc = could not find container \"0cf336bad906408f05ba69b5dd0d0a3cd0a5083f0d2a879fa0201ef853cb2bc3\": container with ID starting with 0cf336bad906408f05ba69b5dd0d0a3cd0a5083f0d2a879fa0201ef853cb2bc3 not found: ID does not exist" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.072502 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4x92\" (UniqueName: \"kubernetes.io/projected/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-kube-api-access-f4x92\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.072545 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.072555 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.101097 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" path="/var/lib/kubelet/pods/47f45e51-ad6b-4fb3-8777-92d9879a72cb/volumes" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.102014 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" path="/var/lib/kubelet/pods/fcb48dc1-9e29-4b50-9687-a9ad5d1ff522/volumes" Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.244262 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-m7lsv"] Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.247212 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-m7lsv"] Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.730288 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tnpl6"] Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.730578 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tnpl6" podUID="f7082230-9ac1-49e5-adb3-4a9178f9f796" containerName="registry-server" containerID="cri-o://1802a2d7d3c1c5c751ad5d4e8e3d4c9925e96290b75e282d38ca44baaab09346" gracePeriod=2 Nov 21 14:08:02 crc kubenswrapper[4774]: E1121 14:08:02.832745 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7082230_9ac1_49e5_adb3_4a9178f9f796.slice/crio-1802a2d7d3c1c5c751ad5d4e8e3d4c9925e96290b75e282d38ca44baaab09346.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7082230_9ac1_49e5_adb3_4a9178f9f796.slice/crio-conmon-1802a2d7d3c1c5c751ad5d4e8e3d4c9925e96290b75e282d38ca44baaab09346.scope\": RecentStats: unable to find data in memory cache]" Nov 21 14:08:02 crc 
kubenswrapper[4774]: I1121 14:08:02.952585 4774 generic.go:334] "Generic (PLEG): container finished" podID="f7082230-9ac1-49e5-adb3-4a9178f9f796" containerID="1802a2d7d3c1c5c751ad5d4e8e3d4c9925e96290b75e282d38ca44baaab09346" exitCode=0 Nov 21 14:08:02 crc kubenswrapper[4774]: I1121 14:08:02.952669 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnpl6" event={"ID":"f7082230-9ac1-49e5-adb3-4a9178f9f796","Type":"ContainerDied","Data":"1802a2d7d3c1c5c751ad5d4e8e3d4c9925e96290b75e282d38ca44baaab09346"} Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.085790 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnpl6" Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.184583 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8g6z\" (UniqueName: \"kubernetes.io/projected/f7082230-9ac1-49e5-adb3-4a9178f9f796-kube-api-access-z8g6z\") pod \"f7082230-9ac1-49e5-adb3-4a9178f9f796\" (UID: \"f7082230-9ac1-49e5-adb3-4a9178f9f796\") " Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.184644 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7082230-9ac1-49e5-adb3-4a9178f9f796-utilities\") pod \"f7082230-9ac1-49e5-adb3-4a9178f9f796\" (UID: \"f7082230-9ac1-49e5-adb3-4a9178f9f796\") " Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.184691 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7082230-9ac1-49e5-adb3-4a9178f9f796-catalog-content\") pod \"f7082230-9ac1-49e5-adb3-4a9178f9f796\" (UID: \"f7082230-9ac1-49e5-adb3-4a9178f9f796\") " Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.185623 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7082230-9ac1-49e5-adb3-4a9178f9f796-utilities" (OuterVolumeSpecName: "utilities") pod "f7082230-9ac1-49e5-adb3-4a9178f9f796" (UID: "f7082230-9ac1-49e5-adb3-4a9178f9f796"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.190047 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7082230-9ac1-49e5-adb3-4a9178f9f796-kube-api-access-z8g6z" (OuterVolumeSpecName: "kube-api-access-z8g6z") pod "f7082230-9ac1-49e5-adb3-4a9178f9f796" (UID: "f7082230-9ac1-49e5-adb3-4a9178f9f796"). InnerVolumeSpecName "kube-api-access-z8g6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.279703 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7082230-9ac1-49e5-adb3-4a9178f9f796-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f7082230-9ac1-49e5-adb3-4a9178f9f796" (UID: "f7082230-9ac1-49e5-adb3-4a9178f9f796"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.286262 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8g6z\" (UniqueName: \"kubernetes.io/projected/f7082230-9ac1-49e5-adb3-4a9178f9f796-kube-api-access-z8g6z\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.286304 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7082230-9ac1-49e5-adb3-4a9178f9f796-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.286318 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7082230-9ac1-49e5-adb3-4a9178f9f796-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.964285 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnpl6" event={"ID":"f7082230-9ac1-49e5-adb3-4a9178f9f796","Type":"ContainerDied","Data":"87bdf4b9420b251dd50d584e4ccbf8dfad92f42ce8be121c1340f881675b4f8f"} Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.964365 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnpl6" Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.964791 4774 scope.go:117] "RemoveContainer" containerID="1802a2d7d3c1c5c751ad5d4e8e3d4c9925e96290b75e282d38ca44baaab09346" Nov 21 14:08:03 crc kubenswrapper[4774]: I1121 14:08:03.991971 4774 scope.go:117] "RemoveContainer" containerID="2756624a026475f37f0d4979b5b362c2599010eb73fc3118d80fdafd75104e9e" Nov 21 14:08:04 crc kubenswrapper[4774]: I1121 14:08:04.009233 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tnpl6"] Nov 21 14:08:04 crc kubenswrapper[4774]: I1121 14:08:04.018997 4774 scope.go:117] "RemoveContainer" containerID="dfac67ee8bb59cfe001b08af5a6081a1c259a4b6aa029819a8d2cb7ae96f9798" Nov 21 14:08:04 crc kubenswrapper[4774]: I1121 14:08:04.022597 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tnpl6"] Nov 21 14:08:04 crc kubenswrapper[4774]: I1121 14:08:04.099676 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" path="/var/lib/kubelet/pods/43d9f80e-f6e4-4a6d-9454-a001ac75b5d4/volumes" Nov 21 14:08:04 crc kubenswrapper[4774]: I1121 14:08:04.100457 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7082230-9ac1-49e5-adb3-4a9178f9f796" path="/var/lib/kubelet/pods/f7082230-9ac1-49e5-adb3-4a9178f9f796/volumes" Nov 21 14:08:16 crc kubenswrapper[4774]: I1121 14:08:16.644379 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-gfhfj"] Nov 21 14:08:41 crc kubenswrapper[4774]: I1121 14:08:41.672760 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" podUID="cc8ceff5-9c76-4521-a560-d9e6424c93f8" containerName="oauth-openshift" containerID="cri-o://51c3eb95b0d6be37dd22ef2b1aca7b3fe2681b15a586447a0bcf9e98febd5354" gracePeriod=15 Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.203957 4774 generic.go:334] "Generic (PLEG): container finished" podID="cc8ceff5-9c76-4521-a560-d9e6424c93f8" 
containerID="51c3eb95b0d6be37dd22ef2b1aca7b3fe2681b15a586447a0bcf9e98febd5354" exitCode=0 Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.204332 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" event={"ID":"cc8ceff5-9c76-4521-a560-d9e6424c93f8","Type":"ContainerDied","Data":"51c3eb95b0d6be37dd22ef2b1aca7b3fe2681b15a586447a0bcf9e98febd5354"} Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.566136 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596601 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7687c8778f-sxcnt"] Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596797 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" containerName="extract-content" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596808 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" containerName="extract-content" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596835 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" containerName="extract-content" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596841 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" containerName="extract-content" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596851 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" containerName="extract-content" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596857 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" containerName="extract-content" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596866 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7082230-9ac1-49e5-adb3-4a9178f9f796" containerName="extract-content" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596872 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7082230-9ac1-49e5-adb3-4a9178f9f796" containerName="extract-content" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596879 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" containerName="registry-server" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596884 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" containerName="registry-server" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596892 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="653e9ec6-e80c-43cd-8d71-f194fc0e40fa" containerName="pruner" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596897 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="653e9ec6-e80c-43cd-8d71-f194fc0e40fa" containerName="pruner" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596908 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7082230-9ac1-49e5-adb3-4a9178f9f796" containerName="extract-utilities" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596914 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7082230-9ac1-49e5-adb3-4a9178f9f796" 
containerName="extract-utilities" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596923 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f43d5d1c-936d-4928-a347-657f630b6c6c" containerName="pruner" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596928 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f43d5d1c-936d-4928-a347-657f630b6c6c" containerName="pruner" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596938 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc8ceff5-9c76-4521-a560-d9e6424c93f8" containerName="oauth-openshift" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596944 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc8ceff5-9c76-4521-a560-d9e6424c93f8" containerName="oauth-openshift" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596953 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" containerName="extract-utilities" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596960 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" containerName="extract-utilities" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596968 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" containerName="extract-utilities" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596974 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" containerName="extract-utilities" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596983 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" containerName="extract-utilities" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.596990 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" containerName="extract-utilities" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.596999 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7082230-9ac1-49e5-adb3-4a9178f9f796" containerName="registry-server" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.597005 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7082230-9ac1-49e5-adb3-4a9178f9f796" containerName="registry-server" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.597012 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" containerName="registry-server" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.597017 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" containerName="registry-server" Nov 21 14:08:42 crc kubenswrapper[4774]: E1121 14:08:42.597025 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" containerName="registry-server" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.597030 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" containerName="registry-server" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.597130 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7082230-9ac1-49e5-adb3-4a9178f9f796" containerName="registry-server" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.597139 4774 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="47f45e51-ad6b-4fb3-8777-92d9879a72cb" containerName="registry-server" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.597146 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcb48dc1-9e29-4b50-9687-a9ad5d1ff522" containerName="registry-server" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.597158 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f43d5d1c-936d-4928-a347-657f630b6c6c" containerName="pruner" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.597164 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc8ceff5-9c76-4521-a560-d9e6424c93f8" containerName="oauth-openshift" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.597172 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="43d9f80e-f6e4-4a6d-9454-a001ac75b5d4" containerName="registry-server" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.597179 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="653e9ec6-e80c-43cd-8d71-f194fc0e40fa" containerName="pruner" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.597538 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.610544 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7687c8778f-sxcnt"] Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645293 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-ocp-branding-template\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645352 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-login\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645413 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-idp-0-file-data\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645441 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-trusted-ca-bundle\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645472 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-provider-selection\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645499 4774 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-session\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645528 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-serving-cert\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645552 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-audit-policies\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645579 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-service-ca\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645606 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqm5j\" (UniqueName: \"kubernetes.io/projected/cc8ceff5-9c76-4521-a560-d9e6424c93f8-kube-api-access-rqm5j\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645629 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-error\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645650 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cc8ceff5-9c76-4521-a560-d9e6424c93f8-audit-dir\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645665 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-router-certs\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645686 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-cliconfig\") pod \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\" (UID: \"cc8ceff5-9c76-4521-a560-d9e6424c93f8\") " Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645755 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645785 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5256d496-306b-4ca9-8019-d18ca1b21cb5-audit-policies\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645843 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645877 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645907 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-user-template-error\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645931 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-session\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645958 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5256d496-306b-4ca9-8019-d18ca1b21cb5-audit-dir\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.645984 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-router-certs\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.646030 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-dxlfn\" (UniqueName: \"kubernetes.io/projected/5256d496-306b-4ca9-8019-d18ca1b21cb5-kube-api-access-dxlfn\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.646023 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cc8ceff5-9c76-4521-a560-d9e6424c93f8-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.646059 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.646088 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-user-template-login\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.646112 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-service-ca\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.646140 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.646171 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.646213 4774 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cc8ceff5-9c76-4521-a560-d9e6424c93f8-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.646303 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.646643 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.646897 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.647176 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.652077 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc8ceff5-9c76-4521-a560-d9e6424c93f8-kube-api-access-rqm5j" (OuterVolumeSpecName: "kube-api-access-rqm5j") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "kube-api-access-rqm5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.652187 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.652975 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.654043 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.654404 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.654693 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.659108 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.659309 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.659714 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "cc8ceff5-9c76-4521-a560-d9e6424c93f8" (UID: "cc8ceff5-9c76-4521-a560-d9e6424c93f8"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.746766 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.746889 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.746930 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5256d496-306b-4ca9-8019-d18ca1b21cb5-audit-policies\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.746957 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.746979 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747009 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-user-template-error\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747031 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-session\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747056 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5256d496-306b-4ca9-8019-d18ca1b21cb5-audit-dir\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc 
kubenswrapper[4774]: I1121 14:08:42.747081 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-router-certs\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747125 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxlfn\" (UniqueName: \"kubernetes.io/projected/5256d496-306b-4ca9-8019-d18ca1b21cb5-kube-api-access-dxlfn\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747149 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747177 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-user-template-login\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747200 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-service-ca\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747224 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747244 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5256d496-306b-4ca9-8019-d18ca1b21cb5-audit-dir\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747283 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747299 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747324 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747353 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747372 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747389 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747406 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747418 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747430 4774 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747441 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747452 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqm5j\" (UniqueName: \"kubernetes.io/projected/cc8ceff5-9c76-4521-a560-d9e6424c93f8-kube-api-access-rqm5j\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747463 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747478 4774 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cc8ceff5-9c76-4521-a560-d9e6424c93f8-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.747963 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5256d496-306b-4ca9-8019-d18ca1b21cb5-audit-policies\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.748291 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.748336 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.749886 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-service-ca\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.751331 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.751854 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-session\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.751877 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-router-certs\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.752061 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.752293 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.754023 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.754080 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-user-template-error\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.754546 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5256d496-306b-4ca9-8019-d18ca1b21cb5-v4-0-config-user-template-login\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.764307 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxlfn\" (UniqueName: \"kubernetes.io/projected/5256d496-306b-4ca9-8019-d18ca1b21cb5-kube-api-access-dxlfn\") pod \"oauth-openshift-7687c8778f-sxcnt\" (UID: \"5256d496-306b-4ca9-8019-d18ca1b21cb5\") " pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:42 crc kubenswrapper[4774]: I1121 14:08:42.919593 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:43 crc kubenswrapper[4774]: I1121 14:08:43.210473 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" event={"ID":"cc8ceff5-9c76-4521-a560-d9e6424c93f8","Type":"ContainerDied","Data":"92e97f1ab2fc6ab8e95513562d425d9a1a016e2766833b14e61ca2f8e24fa9a8"} Nov 21 14:08:43 crc kubenswrapper[4774]: I1121 14:08:43.210538 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-gfhfj" Nov 21 14:08:43 crc kubenswrapper[4774]: I1121 14:08:43.210776 4774 scope.go:117] "RemoveContainer" containerID="51c3eb95b0d6be37dd22ef2b1aca7b3fe2681b15a586447a0bcf9e98febd5354" Nov 21 14:08:43 crc kubenswrapper[4774]: I1121 14:08:43.245204 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-gfhfj"] Nov 21 14:08:43 crc kubenswrapper[4774]: I1121 14:08:43.248678 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-gfhfj"] Nov 21 14:08:43 crc kubenswrapper[4774]: I1121 14:08:43.364360 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7687c8778f-sxcnt"] Nov 21 14:08:44 crc kubenswrapper[4774]: I1121 14:08:44.100213 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc8ceff5-9c76-4521-a560-d9e6424c93f8" path="/var/lib/kubelet/pods/cc8ceff5-9c76-4521-a560-d9e6424c93f8/volumes" Nov 21 14:08:44 crc kubenswrapper[4774]: I1121 14:08:44.219469 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" event={"ID":"5256d496-306b-4ca9-8019-d18ca1b21cb5","Type":"ContainerStarted","Data":"7fad0d53380bdc8213454425e31a4880678823b2264b1d552ae4d525366b07eb"} Nov 21 14:08:44 crc kubenswrapper[4774]: I1121 14:08:44.219760 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" event={"ID":"5256d496-306b-4ca9-8019-d18ca1b21cb5","Type":"ContainerStarted","Data":"ee6baf1e113120d662782d4747e5a15dad639b9d384856f0c6bf63caf0de0628"} Nov 21 14:08:44 crc kubenswrapper[4774]: I1121 14:08:44.220835 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:44 crc kubenswrapper[4774]: I1121 14:08:44.307618 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" Nov 21 14:08:44 crc kubenswrapper[4774]: I1121 14:08:44.330243 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7687c8778f-sxcnt" podStartSLOduration=28.330221865 podStartE2EDuration="28.330221865s" podCreationTimestamp="2025-11-21 14:08:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:08:44.241939913 +0000 UTC m=+314.894139172" watchObservedRunningTime="2025-11-21 14:08:44.330221865 +0000 UTC m=+314.982421164" Nov 21 14:08:55 crc kubenswrapper[4774]: I1121 14:08:55.985288 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xt4q5"] Nov 21 14:08:55 crc kubenswrapper[4774]: I1121 14:08:55.986139 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xt4q5" podUID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" containerName="registry-server" containerID="cri-o://55cade1890016d5a13468a52cfd59adb69ccfd17b8649ca6b12495f3806a9879" gracePeriod=30 Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.004482 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zd7lj"] Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.004994 4774 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-marketplace/community-operators-zd7lj" podUID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" containerName="registry-server" containerID="cri-o://2acc656be78909f5a7da9db73fbb6d9f4b013d4f516f60ce56ab44f4651f1449" gracePeriod=30 Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.007735 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gsdtl"] Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.008263 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" podUID="4aea8951-3939-4012-966c-b0571f992df4" containerName="marketplace-operator" containerID="cri-o://2114d486f39320a0ed11e3f3fcde82aece19974603afe5268eae303440a5f017" gracePeriod=30 Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.020337 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q922n"] Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.020696 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-q922n" podUID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" containerName="registry-server" containerID="cri-o://7a262c4217968154707731aedcb93e0f3d47d15d981eedcd7173aecfa99d38c8" gracePeriod=30 Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.030721 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k42jk"] Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.031004 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-k42jk" podUID="fe5f4376-0558-4824-a2d8-119c74a082eb" containerName="registry-server" containerID="cri-o://1d8adb5f5c07861da2ee43f3906018923581faf4a854b39d907f311e56c5d68a" gracePeriod=30 Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.038326 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5nkpf"] Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.045296 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.060479 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5nkpf"] Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.215527 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqrpx\" (UniqueName: \"kubernetes.io/projected/81b3a21d-90d3-446a-b6ab-f3be7356fd56-kube-api-access-sqrpx\") pod \"marketplace-operator-79b997595-5nkpf\" (UID: \"81b3a21d-90d3-446a-b6ab-f3be7356fd56\") " pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.215591 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/81b3a21d-90d3-446a-b6ab-f3be7356fd56-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5nkpf\" (UID: \"81b3a21d-90d3-446a-b6ab-f3be7356fd56\") " pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.215611 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/81b3a21d-90d3-446a-b6ab-f3be7356fd56-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5nkpf\" (UID: \"81b3a21d-90d3-446a-b6ab-f3be7356fd56\") " pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.300213 4774 generic.go:334] "Generic (PLEG): container finished" podID="4aea8951-3939-4012-966c-b0571f992df4" containerID="2114d486f39320a0ed11e3f3fcde82aece19974603afe5268eae303440a5f017" exitCode=0 Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.300314 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" event={"ID":"4aea8951-3939-4012-966c-b0571f992df4","Type":"ContainerDied","Data":"2114d486f39320a0ed11e3f3fcde82aece19974603afe5268eae303440a5f017"} Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.303871 4774 generic.go:334] "Generic (PLEG): container finished" podID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" containerID="7a262c4217968154707731aedcb93e0f3d47d15d981eedcd7173aecfa99d38c8" exitCode=0 Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.303939 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q922n" event={"ID":"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2","Type":"ContainerDied","Data":"7a262c4217968154707731aedcb93e0f3d47d15d981eedcd7173aecfa99d38c8"} Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.306524 4774 generic.go:334] "Generic (PLEG): container finished" podID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" containerID="55cade1890016d5a13468a52cfd59adb69ccfd17b8649ca6b12495f3806a9879" exitCode=0 Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.306553 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xt4q5" event={"ID":"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411","Type":"ContainerDied","Data":"55cade1890016d5a13468a52cfd59adb69ccfd17b8649ca6b12495f3806a9879"} Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.308599 4774 generic.go:334] "Generic (PLEG): container finished" podID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" 
containerID="2acc656be78909f5a7da9db73fbb6d9f4b013d4f516f60ce56ab44f4651f1449" exitCode=0 Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.308659 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zd7lj" event={"ID":"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a","Type":"ContainerDied","Data":"2acc656be78909f5a7da9db73fbb6d9f4b013d4f516f60ce56ab44f4651f1449"} Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.312529 4774 generic.go:334] "Generic (PLEG): container finished" podID="fe5f4376-0558-4824-a2d8-119c74a082eb" containerID="1d8adb5f5c07861da2ee43f3906018923581faf4a854b39d907f311e56c5d68a" exitCode=0 Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.312558 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k42jk" event={"ID":"fe5f4376-0558-4824-a2d8-119c74a082eb","Type":"ContainerDied","Data":"1d8adb5f5c07861da2ee43f3906018923581faf4a854b39d907f311e56c5d68a"} Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.317292 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/81b3a21d-90d3-446a-b6ab-f3be7356fd56-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5nkpf\" (UID: \"81b3a21d-90d3-446a-b6ab-f3be7356fd56\") " pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.317411 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqrpx\" (UniqueName: \"kubernetes.io/projected/81b3a21d-90d3-446a-b6ab-f3be7356fd56-kube-api-access-sqrpx\") pod \"marketplace-operator-79b997595-5nkpf\" (UID: \"81b3a21d-90d3-446a-b6ab-f3be7356fd56\") " pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.317459 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/81b3a21d-90d3-446a-b6ab-f3be7356fd56-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5nkpf\" (UID: \"81b3a21d-90d3-446a-b6ab-f3be7356fd56\") " pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.324095 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/81b3a21d-90d3-446a-b6ab-f3be7356fd56-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5nkpf\" (UID: \"81b3a21d-90d3-446a-b6ab-f3be7356fd56\") " pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.328577 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/81b3a21d-90d3-446a-b6ab-f3be7356fd56-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5nkpf\" (UID: \"81b3a21d-90d3-446a-b6ab-f3be7356fd56\") " pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.333346 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqrpx\" (UniqueName: \"kubernetes.io/projected/81b3a21d-90d3-446a-b6ab-f3be7356fd56-kube-api-access-sqrpx\") pod \"marketplace-operator-79b997595-5nkpf\" (UID: \"81b3a21d-90d3-446a-b6ab-f3be7356fd56\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.441742 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.448002 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.450136 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.476702 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.486708 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k42jk" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.494088 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q922n" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.620639 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-utilities\") pod \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\" (UID: \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.620697 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-catalog-content\") pod \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\" (UID: \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.620732 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4aea8951-3939-4012-966c-b0571f992df4-marketplace-trusted-ca\") pod \"4aea8951-3939-4012-966c-b0571f992df4\" (UID: \"4aea8951-3939-4012-966c-b0571f992df4\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.620770 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhzrk\" (UniqueName: \"kubernetes.io/projected/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-kube-api-access-fhzrk\") pod \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\" (UID: \"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.620800 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe5f4376-0558-4824-a2d8-119c74a082eb-catalog-content\") pod \"fe5f4376-0558-4824-a2d8-119c74a082eb\" (UID: \"fe5f4376-0558-4824-a2d8-119c74a082eb\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.620871 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-catalog-content\") pod \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\" (UID: \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.621755 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/configmap/4aea8951-3939-4012-966c-b0571f992df4-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "4aea8951-3939-4012-966c-b0571f992df4" (UID: "4aea8951-3939-4012-966c-b0571f992df4"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.621778 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-utilities" (OuterVolumeSpecName: "utilities") pod "e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" (UID: "e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.622200 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe5f4376-0558-4824-a2d8-119c74a082eb-utilities\") pod \"fe5f4376-0558-4824-a2d8-119c74a082eb\" (UID: \"fe5f4376-0558-4824-a2d8-119c74a082eb\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.622234 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kh44\" (UniqueName: \"kubernetes.io/projected/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-kube-api-access-8kh44\") pod \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\" (UID: \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.622268 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-utilities\") pod \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\" (UID: \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.622657 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq5dz\" (UniqueName: \"kubernetes.io/projected/4aea8951-3939-4012-966c-b0571f992df4-kube-api-access-mq5dz\") pod \"4aea8951-3939-4012-966c-b0571f992df4\" (UID: \"4aea8951-3939-4012-966c-b0571f992df4\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.622688 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4aea8951-3939-4012-966c-b0571f992df4-marketplace-operator-metrics\") pod \"4aea8951-3939-4012-966c-b0571f992df4\" (UID: \"4aea8951-3939-4012-966c-b0571f992df4\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.622716 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6226j\" (UniqueName: \"kubernetes.io/projected/fe5f4376-0558-4824-a2d8-119c74a082eb-kube-api-access-6226j\") pod \"fe5f4376-0558-4824-a2d8-119c74a082eb\" (UID: \"fe5f4376-0558-4824-a2d8-119c74a082eb\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.622749 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-utilities\") pod \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\" (UID: \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.622766 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svs9t\" (UniqueName: \"kubernetes.io/projected/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-kube-api-access-svs9t\") pod 
\"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\" (UID: \"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.622797 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-catalog-content\") pod \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\" (UID: \"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2\") " Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.623053 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe5f4376-0558-4824-a2d8-119c74a082eb-utilities" (OuterVolumeSpecName: "utilities") pod "fe5f4376-0558-4824-a2d8-119c74a082eb" (UID: "fe5f4376-0558-4824-a2d8-119c74a082eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.623087 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-utilities" (OuterVolumeSpecName: "utilities") pod "8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" (UID: "8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.623107 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.623124 4774 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4aea8951-3939-4012-966c-b0571f992df4-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.624107 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-utilities" (OuterVolumeSpecName: "utilities") pod "a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" (UID: "a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.626312 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-kube-api-access-fhzrk" (OuterVolumeSpecName: "kube-api-access-fhzrk") pod "e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" (UID: "e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411"). InnerVolumeSpecName "kube-api-access-fhzrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.630039 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-kube-api-access-svs9t" (OuterVolumeSpecName: "kube-api-access-svs9t") pod "a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" (UID: "a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a"). InnerVolumeSpecName "kube-api-access-svs9t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.632812 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe5f4376-0558-4824-a2d8-119c74a082eb-kube-api-access-6226j" (OuterVolumeSpecName: "kube-api-access-6226j") pod "fe5f4376-0558-4824-a2d8-119c74a082eb" (UID: "fe5f4376-0558-4824-a2d8-119c74a082eb"). InnerVolumeSpecName "kube-api-access-6226j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.632854 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-kube-api-access-8kh44" (OuterVolumeSpecName: "kube-api-access-8kh44") pod "8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" (UID: "8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2"). InnerVolumeSpecName "kube-api-access-8kh44". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.633679 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4aea8951-3939-4012-966c-b0571f992df4-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "4aea8951-3939-4012-966c-b0571f992df4" (UID: "4aea8951-3939-4012-966c-b0571f992df4"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.635714 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4aea8951-3939-4012-966c-b0571f992df4-kube-api-access-mq5dz" (OuterVolumeSpecName: "kube-api-access-mq5dz") pod "4aea8951-3939-4012-966c-b0571f992df4" (UID: "4aea8951-3939-4012-966c-b0571f992df4"). InnerVolumeSpecName "kube-api-access-mq5dz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.654496 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" (UID: "8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.682013 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" (UID: "e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.687644 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" (UID: "a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.687971 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5nkpf"] Nov 21 14:08:56 crc kubenswrapper[4774]: W1121 14:08:56.696740 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81b3a21d_90d3_446a_b6ab_f3be7356fd56.slice/crio-1198054570f49841bead05d9d0e4505e247c636c4180d3ccd709f94165b74073 WatchSource:0}: Error finding container 1198054570f49841bead05d9d0e4505e247c636c4180d3ccd709f94165b74073: Status 404 returned error can't find the container with id 1198054570f49841bead05d9d0e4505e247c636c4180d3ccd709f94165b74073 Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.724503 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.724540 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svs9t\" (UniqueName: \"kubernetes.io/projected/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-kube-api-access-svs9t\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.724554 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.724568 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.724580 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhzrk\" (UniqueName: \"kubernetes.io/projected/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411-kube-api-access-fhzrk\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.724594 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.724604 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe5f4376-0558-4824-a2d8-119c74a082eb-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.724616 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kh44\" (UniqueName: \"kubernetes.io/projected/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-kube-api-access-8kh44\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.724627 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.724640 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq5dz\" (UniqueName: \"kubernetes.io/projected/4aea8951-3939-4012-966c-b0571f992df4-kube-api-access-mq5dz\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.724654 
4774 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/4aea8951-3939-4012-966c-b0571f992df4-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.724666 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6226j\" (UniqueName: \"kubernetes.io/projected/fe5f4376-0558-4824-a2d8-119c74a082eb-kube-api-access-6226j\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.735184 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe5f4376-0558-4824-a2d8-119c74a082eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe5f4376-0558-4824-a2d8-119c74a082eb" (UID: "fe5f4376-0558-4824-a2d8-119c74a082eb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:08:56 crc kubenswrapper[4774]: I1121 14:08:56.825414 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe5f4376-0558-4824-a2d8-119c74a082eb-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.320541 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q922n" event={"ID":"8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2","Type":"ContainerDied","Data":"f9a49edc0454ab4fee1c38f1b177d5aa4c93df7184a9f046edd771556dba3cec"} Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.320602 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q922n" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.320807 4774 scope.go:117] "RemoveContainer" containerID="7a262c4217968154707731aedcb93e0f3d47d15d981eedcd7173aecfa99d38c8" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.323038 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xt4q5" event={"ID":"e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411","Type":"ContainerDied","Data":"005e24275f47c825f8be76617a2dbf480fa024196b9700364f5f7d8ea06a3230"} Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.323108 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xt4q5" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.329161 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zd7lj" event={"ID":"a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a","Type":"ContainerDied","Data":"73536bdf25aea75d3fe81ffa320460732955869cf7422f7c5dfbc9f159c21ff5"} Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.329247 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zd7lj" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.334243 4774 scope.go:117] "RemoveContainer" containerID="8e8a3bf4e1c2708bd6ba12036a1509b5c2cb0351d1ea3dde246929dbab82a799" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.336450 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-k42jk" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.336459 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k42jk" event={"ID":"fe5f4376-0558-4824-a2d8-119c74a082eb","Type":"ContainerDied","Data":"9fad8a905c9c86561980a2e4259d35c7c10c9e3884bb768f096a7d0d388541fe"} Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.337864 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" event={"ID":"81b3a21d-90d3-446a-b6ab-f3be7356fd56","Type":"ContainerStarted","Data":"b8f8ae5698e6612b4adb0f827f5a7599897d05ce0e1e0a68719b6d23847f211c"} Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.337930 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" event={"ID":"81b3a21d-90d3-446a-b6ab-f3be7356fd56","Type":"ContainerStarted","Data":"1198054570f49841bead05d9d0e4505e247c636c4180d3ccd709f94165b74073"} Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.337951 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.340214 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" event={"ID":"4aea8951-3939-4012-966c-b0571f992df4","Type":"ContainerDied","Data":"0ab8bfd562278b6f2e2ea1867bad7db6edea514d49125d3fb051c0d7013176b8"} Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.340264 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-gsdtl" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.348939 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.352287 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q922n"] Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.354147 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-q922n"] Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.377704 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-5nkpf" podStartSLOduration=1.377680887 podStartE2EDuration="1.377680887s" podCreationTimestamp="2025-11-21 14:08:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:08:57.366311383 +0000 UTC m=+328.018510652" watchObservedRunningTime="2025-11-21 14:08:57.377680887 +0000 UTC m=+328.029880216" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.378508 4774 scope.go:117] "RemoveContainer" containerID="8f08e873b8e1e711883e01a50283f7513d9eeaebadd417124bc9542aefc94985" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.429130 4774 scope.go:117] "RemoveContainer" containerID="55cade1890016d5a13468a52cfd59adb69ccfd17b8649ca6b12495f3806a9879" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.430274 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xt4q5"] Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.433688 4774 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xt4q5"] Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.445680 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k42jk"] Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.449571 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-k42jk"] Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.465150 4774 scope.go:117] "RemoveContainer" containerID="e8602aae73c0dca4e63af61b3163c1316b82437931e9f33e1a62cc0d0ff6c938" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.476960 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zd7lj"] Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.479474 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zd7lj"] Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.482003 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gsdtl"] Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.488176 4774 scope.go:117] "RemoveContainer" containerID="b8ce4dc14e7577582eb7852d0655efcf66a21abffd81e266a315b61d2f29aa12" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.490921 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-gsdtl"] Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.501032 4774 scope.go:117] "RemoveContainer" containerID="2acc656be78909f5a7da9db73fbb6d9f4b013d4f516f60ce56ab44f4651f1449" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.515419 4774 scope.go:117] "RemoveContainer" containerID="5738fff86a8f4598c438f62df944eab376c453df37e321feae6a5754337e46c8" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.535904 4774 scope.go:117] "RemoveContainer" containerID="c57083f8663787da9f33f703979fbfcb4e4abe324070aee75e4a275bb00d1cbc" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.560900 4774 scope.go:117] "RemoveContainer" containerID="1d8adb5f5c07861da2ee43f3906018923581faf4a854b39d907f311e56c5d68a" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.574174 4774 scope.go:117] "RemoveContainer" containerID="323138e463303dd707163620b3580837c8be3805ba37a51422523f5d102ce3b5" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.591049 4774 scope.go:117] "RemoveContainer" containerID="f80abea7512fdcb241239e378640112fc083163b50216c2de33a818d946e29b4" Nov 21 14:08:57 crc kubenswrapper[4774]: I1121 14:08:57.604295 4774 scope.go:117] "RemoveContainer" containerID="2114d486f39320a0ed11e3f3fcde82aece19974603afe5268eae303440a5f017" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.179657 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4aea8951-3939-4012-966c-b0571f992df4" path="/var/lib/kubelet/pods/4aea8951-3939-4012-966c-b0571f992df4/volumes" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.180477 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" path="/var/lib/kubelet/pods/8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2/volumes" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.181417 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" path="/var/lib/kubelet/pods/a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a/volumes" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 
14:08:58.184171 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" path="/var/lib/kubelet/pods/e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411/volumes" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.185170 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe5f4376-0558-4824-a2d8-119c74a082eb" path="/var/lib/kubelet/pods/fe5f4376-0558-4824-a2d8-119c74a082eb/volumes" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.202707 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-97n98"] Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203206 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" containerName="registry-server" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203218 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" containerName="registry-server" Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203236 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" containerName="extract-utilities" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203242 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" containerName="extract-utilities" Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203255 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" containerName="registry-server" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203262 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" containerName="registry-server" Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203271 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" containerName="registry-server" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203276 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" containerName="registry-server" Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203283 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe5f4376-0558-4824-a2d8-119c74a082eb" containerName="registry-server" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203289 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe5f4376-0558-4824-a2d8-119c74a082eb" containerName="registry-server" Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203298 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" containerName="extract-content" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203303 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" containerName="extract-content" Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203311 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" containerName="extract-utilities" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203317 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" containerName="extract-utilities" Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203325 4774 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" containerName="extract-utilities" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203330 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" containerName="extract-utilities" Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203340 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe5f4376-0558-4824-a2d8-119c74a082eb" containerName="extract-content" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203346 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe5f4376-0558-4824-a2d8-119c74a082eb" containerName="extract-content" Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203356 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" containerName="extract-content" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203361 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" containerName="extract-content" Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203370 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4aea8951-3939-4012-966c-b0571f992df4" containerName="marketplace-operator" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203376 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4aea8951-3939-4012-966c-b0571f992df4" containerName="marketplace-operator" Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203386 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe5f4376-0558-4824-a2d8-119c74a082eb" containerName="extract-utilities" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203393 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe5f4376-0558-4824-a2d8-119c74a082eb" containerName="extract-utilities" Nov 21 14:08:58 crc kubenswrapper[4774]: E1121 14:08:58.203401 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" containerName="extract-content" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203408 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" containerName="extract-content" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203485 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8600e8eb-acb2-4a1c-9aaf-53e02ddbe2c2" containerName="registry-server" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203495 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8349c9b-3b4f-4b5d-ab74-d70fcc3e9411" containerName="registry-server" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203508 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe5f4376-0558-4824-a2d8-119c74a082eb" containerName="registry-server" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203516 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2b3d86f-150e-45fc-8de0-75f3ad3c9c1a" containerName="registry-server" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.203524 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4aea8951-3939-4012-966c-b0571f992df4" containerName="marketplace-operator" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.204195 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.206635 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.220228 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-97n98"] Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.344260 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb-catalog-content\") pod \"certified-operators-97n98\" (UID: \"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb\") " pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.344327 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qznd4\" (UniqueName: \"kubernetes.io/projected/8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb-kube-api-access-qznd4\") pod \"certified-operators-97n98\" (UID: \"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb\") " pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.344463 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb-utilities\") pod \"certified-operators-97n98\" (UID: \"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb\") " pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.399588 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rmcf7"] Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.400856 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.404351 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.415758 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rmcf7"] Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.445512 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb-catalog-content\") pod \"certified-operators-97n98\" (UID: \"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb\") " pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.445581 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qznd4\" (UniqueName: \"kubernetes.io/projected/8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb-kube-api-access-qznd4\") pod \"certified-operators-97n98\" (UID: \"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb\") " pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.445680 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb-utilities\") pod \"certified-operators-97n98\" (UID: \"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb\") " pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.446674 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb-utilities\") pod \"certified-operators-97n98\" (UID: \"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb\") " pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.446711 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb-catalog-content\") pod \"certified-operators-97n98\" (UID: \"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb\") " pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.465771 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qznd4\" (UniqueName: \"kubernetes.io/projected/8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb-kube-api-access-qznd4\") pod \"certified-operators-97n98\" (UID: \"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb\") " pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.522730 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.546560 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f9fbbc3-d25f-46e1-9db6-49a5505385cb-catalog-content\") pod \"redhat-marketplace-rmcf7\" (UID: \"4f9fbbc3-d25f-46e1-9db6-49a5505385cb\") " pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.546626 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmr8s\" (UniqueName: \"kubernetes.io/projected/4f9fbbc3-d25f-46e1-9db6-49a5505385cb-kube-api-access-zmr8s\") pod \"redhat-marketplace-rmcf7\" (UID: \"4f9fbbc3-d25f-46e1-9db6-49a5505385cb\") " pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.546659 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f9fbbc3-d25f-46e1-9db6-49a5505385cb-utilities\") pod \"redhat-marketplace-rmcf7\" (UID: \"4f9fbbc3-d25f-46e1-9db6-49a5505385cb\") " pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.647739 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f9fbbc3-d25f-46e1-9db6-49a5505385cb-catalog-content\") pod \"redhat-marketplace-rmcf7\" (UID: \"4f9fbbc3-d25f-46e1-9db6-49a5505385cb\") " pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.647792 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmr8s\" (UniqueName: \"kubernetes.io/projected/4f9fbbc3-d25f-46e1-9db6-49a5505385cb-kube-api-access-zmr8s\") pod \"redhat-marketplace-rmcf7\" (UID: \"4f9fbbc3-d25f-46e1-9db6-49a5505385cb\") " pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.647836 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f9fbbc3-d25f-46e1-9db6-49a5505385cb-utilities\") pod \"redhat-marketplace-rmcf7\" (UID: \"4f9fbbc3-d25f-46e1-9db6-49a5505385cb\") " pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.648582 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f9fbbc3-d25f-46e1-9db6-49a5505385cb-catalog-content\") pod \"redhat-marketplace-rmcf7\" (UID: \"4f9fbbc3-d25f-46e1-9db6-49a5505385cb\") " pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.648625 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f9fbbc3-d25f-46e1-9db6-49a5505385cb-utilities\") pod \"redhat-marketplace-rmcf7\" (UID: \"4f9fbbc3-d25f-46e1-9db6-49a5505385cb\") " pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.668684 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmr8s\" (UniqueName: \"kubernetes.io/projected/4f9fbbc3-d25f-46e1-9db6-49a5505385cb-kube-api-access-zmr8s\") pod 
\"redhat-marketplace-rmcf7\" (UID: \"4f9fbbc3-d25f-46e1-9db6-49a5505385cb\") " pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.725601 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.896023 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rmcf7"] Nov 21 14:08:58 crc kubenswrapper[4774]: I1121 14:08:58.898247 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-97n98"] Nov 21 14:08:58 crc kubenswrapper[4774]: W1121 14:08:58.905627 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ff4d566_1b6b_47b0_8112_6fdd7a77ecbb.slice/crio-409972a86849768c85915ca2a214e24f5db070c8c3bb3a83742edf1e1d2b4c83 WatchSource:0}: Error finding container 409972a86849768c85915ca2a214e24f5db070c8c3bb3a83742edf1e1d2b4c83: Status 404 returned error can't find the container with id 409972a86849768c85915ca2a214e24f5db070c8c3bb3a83742edf1e1d2b4c83 Nov 21 14:08:58 crc kubenswrapper[4774]: W1121 14:08:58.906666 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9fbbc3_d25f_46e1_9db6_49a5505385cb.slice/crio-0d6cd02ec559d463c8ab74c0b0d91dbc0e460aadd02e0efd1bcc7d23eb2e8143 WatchSource:0}: Error finding container 0d6cd02ec559d463c8ab74c0b0d91dbc0e460aadd02e0efd1bcc7d23eb2e8143: Status 404 returned error can't find the container with id 0d6cd02ec559d463c8ab74c0b0d91dbc0e460aadd02e0efd1bcc7d23eb2e8143 Nov 21 14:08:59 crc kubenswrapper[4774]: I1121 14:08:59.359865 4774 generic.go:334] "Generic (PLEG): container finished" podID="8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb" containerID="789d538b5691c87d19f82c35fd13968cd7d801d1b2e306637b4b335618334b18" exitCode=0 Nov 21 14:08:59 crc kubenswrapper[4774]: I1121 14:08:59.360187 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97n98" event={"ID":"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb","Type":"ContainerDied","Data":"789d538b5691c87d19f82c35fd13968cd7d801d1b2e306637b4b335618334b18"} Nov 21 14:08:59 crc kubenswrapper[4774]: I1121 14:08:59.360225 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97n98" event={"ID":"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb","Type":"ContainerStarted","Data":"409972a86849768c85915ca2a214e24f5db070c8c3bb3a83742edf1e1d2b4c83"} Nov 21 14:08:59 crc kubenswrapper[4774]: I1121 14:08:59.364702 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rmcf7" event={"ID":"4f9fbbc3-d25f-46e1-9db6-49a5505385cb","Type":"ContainerDied","Data":"cbb2720a22a9d71ad84468ce8252f11a3e1069cf6f08227b0282e5921f9ead7f"} Nov 21 14:08:59 crc kubenswrapper[4774]: I1121 14:08:59.364518 4774 generic.go:334] "Generic (PLEG): container finished" podID="4f9fbbc3-d25f-46e1-9db6-49a5505385cb" containerID="cbb2720a22a9d71ad84468ce8252f11a3e1069cf6f08227b0282e5921f9ead7f" exitCode=0 Nov 21 14:08:59 crc kubenswrapper[4774]: I1121 14:08:59.365340 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rmcf7" event={"ID":"4f9fbbc3-d25f-46e1-9db6-49a5505385cb","Type":"ContainerStarted","Data":"0d6cd02ec559d463c8ab74c0b0d91dbc0e460aadd02e0efd1bcc7d23eb2e8143"} Nov 21 
Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.374045 4774 generic.go:334] "Generic (PLEG): container finished" podID="8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb" containerID="8116f6ed89c73d1c2971d47de1b63d71f260724d3604826a6673ea7184e9f57b" exitCode=0 Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.374393 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97n98" event={"ID":"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb","Type":"ContainerDied","Data":"8116f6ed89c73d1c2971d47de1b63d71f260724d3604826a6673ea7184e9f57b"} Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.376357 4774 generic.go:334] "Generic (PLEG): container finished" podID="4f9fbbc3-d25f-46e1-9db6-49a5505385cb" containerID="32cbe1422d7d5d205a43819febe444e572ff4062b7163f4662072672b3c73100" exitCode=0 Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.376390 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rmcf7" event={"ID":"4f9fbbc3-d25f-46e1-9db6-49a5505385cb","Type":"ContainerDied","Data":"32cbe1422d7d5d205a43819febe444e572ff4062b7163f4662072672b3c73100"} Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.599095 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-crjv9"] Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.600092 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.602020 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.610119 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-crjv9"] Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.775077 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/739d794c-64f6-4e61-8761-2e270429f355-utilities\") pod \"redhat-operators-crjv9\" (UID: \"739d794c-64f6-4e61-8761-2e270429f355\") " pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.775147 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/739d794c-64f6-4e61-8761-2e270429f355-catalog-content\") pod \"redhat-operators-crjv9\" (UID: \"739d794c-64f6-4e61-8761-2e270429f355\") " pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.775202 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxndz\" (UniqueName: \"kubernetes.io/projected/739d794c-64f6-4e61-8761-2e270429f355-kube-api-access-xxndz\") pod \"redhat-operators-crjv9\" (UID: \"739d794c-64f6-4e61-8761-2e270429f355\") " pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.804899 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-stwtl"] Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.806359 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.812927 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-stwtl"] Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.813783 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.877100 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxndz\" (UniqueName: \"kubernetes.io/projected/739d794c-64f6-4e61-8761-2e270429f355-kube-api-access-xxndz\") pod \"redhat-operators-crjv9\" (UID: \"739d794c-64f6-4e61-8761-2e270429f355\") " pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.877174 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/739d794c-64f6-4e61-8761-2e270429f355-utilities\") pod \"redhat-operators-crjv9\" (UID: \"739d794c-64f6-4e61-8761-2e270429f355\") " pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.877211 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/739d794c-64f6-4e61-8761-2e270429f355-catalog-content\") pod \"redhat-operators-crjv9\" (UID: \"739d794c-64f6-4e61-8761-2e270429f355\") " pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.877661 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/739d794c-64f6-4e61-8761-2e270429f355-catalog-content\") pod \"redhat-operators-crjv9\" (UID: \"739d794c-64f6-4e61-8761-2e270429f355\") " pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.877853 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/739d794c-64f6-4e61-8761-2e270429f355-utilities\") pod \"redhat-operators-crjv9\" (UID: \"739d794c-64f6-4e61-8761-2e270429f355\") " pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.907783 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxndz\" (UniqueName: \"kubernetes.io/projected/739d794c-64f6-4e61-8761-2e270429f355-kube-api-access-xxndz\") pod \"redhat-operators-crjv9\" (UID: \"739d794c-64f6-4e61-8761-2e270429f355\") " pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.921564 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.979111 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a661380a-1987-4c11-a202-448d6fca796e-catalog-content\") pod \"community-operators-stwtl\" (UID: \"a661380a-1987-4c11-a202-448d6fca796e\") " pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.979270 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a661380a-1987-4c11-a202-448d6fca796e-utilities\") pod \"community-operators-stwtl\" (UID: \"a661380a-1987-4c11-a202-448d6fca796e\") " pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:00 crc kubenswrapper[4774]: I1121 14:09:00.979344 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k74hr\" (UniqueName: \"kubernetes.io/projected/a661380a-1987-4c11-a202-448d6fca796e-kube-api-access-k74hr\") pod \"community-operators-stwtl\" (UID: \"a661380a-1987-4c11-a202-448d6fca796e\") " pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.081792 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k74hr\" (UniqueName: \"kubernetes.io/projected/a661380a-1987-4c11-a202-448d6fca796e-kube-api-access-k74hr\") pod \"community-operators-stwtl\" (UID: \"a661380a-1987-4c11-a202-448d6fca796e\") " pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.082362 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a661380a-1987-4c11-a202-448d6fca796e-catalog-content\") pod \"community-operators-stwtl\" (UID: \"a661380a-1987-4c11-a202-448d6fca796e\") " pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.082535 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a661380a-1987-4c11-a202-448d6fca796e-utilities\") pod \"community-operators-stwtl\" (UID: \"a661380a-1987-4c11-a202-448d6fca796e\") " pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.083106 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a661380a-1987-4c11-a202-448d6fca796e-catalog-content\") pod \"community-operators-stwtl\" (UID: \"a661380a-1987-4c11-a202-448d6fca796e\") " pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.083180 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a661380a-1987-4c11-a202-448d6fca796e-utilities\") pod \"community-operators-stwtl\" (UID: \"a661380a-1987-4c11-a202-448d6fca796e\") " pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.104217 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k74hr\" (UniqueName: \"kubernetes.io/projected/a661380a-1987-4c11-a202-448d6fca796e-kube-api-access-k74hr\") pod 
\"community-operators-stwtl\" (UID: \"a661380a-1987-4c11-a202-448d6fca796e\") " pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.123935 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-crjv9"] Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.131125 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:01 crc kubenswrapper[4774]: W1121 14:09:01.136964 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod739d794c_64f6_4e61_8761_2e270429f355.slice/crio-1e5791706ecef35c741b9afede950b5ed25d3d0cea038fd34dfe605088fb84db WatchSource:0}: Error finding container 1e5791706ecef35c741b9afede950b5ed25d3d0cea038fd34dfe605088fb84db: Status 404 returned error can't find the container with id 1e5791706ecef35c741b9afede950b5ed25d3d0cea038fd34dfe605088fb84db Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.387311 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rmcf7" event={"ID":"4f9fbbc3-d25f-46e1-9db6-49a5505385cb","Type":"ContainerStarted","Data":"3919e6c5f145c943bc361bee91532d920588ac116cc3977ca871f32b5511e6b7"} Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.391213 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-97n98" event={"ID":"8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb","Type":"ContainerStarted","Data":"4af829fb4f5495ed5850bc177418f493786c1ac5c74ae2be4e52c42258ca01c8"} Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.394394 4774 generic.go:334] "Generic (PLEG): container finished" podID="739d794c-64f6-4e61-8761-2e270429f355" containerID="6629313d2e18430945e277bf4b8567ae56ad6075aad61ecaeea571e90132daa3" exitCode=0 Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.394432 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-crjv9" event={"ID":"739d794c-64f6-4e61-8761-2e270429f355","Type":"ContainerDied","Data":"6629313d2e18430945e277bf4b8567ae56ad6075aad61ecaeea571e90132daa3"} Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.394451 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-crjv9" event={"ID":"739d794c-64f6-4e61-8761-2e270429f355","Type":"ContainerStarted","Data":"1e5791706ecef35c741b9afede950b5ed25d3d0cea038fd34dfe605088fb84db"} Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.407524 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rmcf7" podStartSLOduration=1.9172246560000001 podStartE2EDuration="3.407496485s" podCreationTimestamp="2025-11-21 14:08:58 +0000 UTC" firstStartedPulling="2025-11-21 14:08:59.366411292 +0000 UTC m=+330.018610551" lastFinishedPulling="2025-11-21 14:09:00.856683121 +0000 UTC m=+331.508882380" observedRunningTime="2025-11-21 14:09:01.40732462 +0000 UTC m=+332.059523879" watchObservedRunningTime="2025-11-21 14:09:01.407496485 +0000 UTC m=+332.059695744" Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.414356 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-stwtl"] Nov 21 14:09:01 crc kubenswrapper[4774]: W1121 14:09:01.428101 4774 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda661380a_1987_4c11_a202_448d6fca796e.slice/crio-e6394c88d7eda358d7f5a50227c55a77f147da85daa7a2051a563d016d1ec6ac WatchSource:0}: Error finding container e6394c88d7eda358d7f5a50227c55a77f147da85daa7a2051a563d016d1ec6ac: Status 404 returned error can't find the container with id e6394c88d7eda358d7f5a50227c55a77f147da85daa7a2051a563d016d1ec6ac Nov 21 14:09:01 crc kubenswrapper[4774]: I1121 14:09:01.454683 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-97n98" podStartSLOduration=1.9300830439999999 podStartE2EDuration="3.45466319s" podCreationTimestamp="2025-11-21 14:08:58 +0000 UTC" firstStartedPulling="2025-11-21 14:08:59.361843188 +0000 UTC m=+330.014042437" lastFinishedPulling="2025-11-21 14:09:00.886423324 +0000 UTC m=+331.538622583" observedRunningTime="2025-11-21 14:09:01.452055883 +0000 UTC m=+332.104255172" watchObservedRunningTime="2025-11-21 14:09:01.45466319 +0000 UTC m=+332.106862449" Nov 21 14:09:02 crc kubenswrapper[4774]: I1121 14:09:02.401970 4774 generic.go:334] "Generic (PLEG): container finished" podID="a661380a-1987-4c11-a202-448d6fca796e" containerID="e7ffa6d69ed9a1e9008d80f99045edbb5bfee24d4da9d0b48761f051a51b58d1" exitCode=0 Nov 21 14:09:02 crc kubenswrapper[4774]: I1121 14:09:02.402394 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-stwtl" event={"ID":"a661380a-1987-4c11-a202-448d6fca796e","Type":"ContainerDied","Data":"e7ffa6d69ed9a1e9008d80f99045edbb5bfee24d4da9d0b48761f051a51b58d1"} Nov 21 14:09:02 crc kubenswrapper[4774]: I1121 14:09:02.402427 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-stwtl" event={"ID":"a661380a-1987-4c11-a202-448d6fca796e","Type":"ContainerStarted","Data":"e6394c88d7eda358d7f5a50227c55a77f147da85daa7a2051a563d016d1ec6ac"} Nov 21 14:09:02 crc kubenswrapper[4774]: I1121 14:09:02.406961 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-crjv9" event={"ID":"739d794c-64f6-4e61-8761-2e270429f355","Type":"ContainerStarted","Data":"ef285aff0769da701b1ff79c3426e44e2e2d478de1b539e7d1ff2c47f03d423b"} Nov 21 14:09:03 crc kubenswrapper[4774]: I1121 14:09:03.414062 4774 generic.go:334] "Generic (PLEG): container finished" podID="739d794c-64f6-4e61-8761-2e270429f355" containerID="ef285aff0769da701b1ff79c3426e44e2e2d478de1b539e7d1ff2c47f03d423b" exitCode=0 Nov 21 14:09:03 crc kubenswrapper[4774]: I1121 14:09:03.414166 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-crjv9" event={"ID":"739d794c-64f6-4e61-8761-2e270429f355","Type":"ContainerDied","Data":"ef285aff0769da701b1ff79c3426e44e2e2d478de1b539e7d1ff2c47f03d423b"} Nov 21 14:09:04 crc kubenswrapper[4774]: I1121 14:09:04.421231 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-crjv9" event={"ID":"739d794c-64f6-4e61-8761-2e270429f355","Type":"ContainerStarted","Data":"a3048c69ff982549c87ceaa51481a880e20bddb8017cb3357fbfc6f6fc19ab55"} Nov 21 14:09:04 crc kubenswrapper[4774]: I1121 14:09:04.422865 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-stwtl" event={"ID":"a661380a-1987-4c11-a202-448d6fca796e","Type":"ContainerStarted","Data":"a33d9119ed0e1f798518a8dfcb99fd849d8ea5466913fe8940b89c55af04d547"}
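==== annotation ====
The "Observed pod startup duration" records (redhat-marketplace-rmcf7 and certified-operators-97n98 above, redhat-operators-crjv9 and community-operators-stwtl just below) carry two durations. podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp; podStartSLOduration appears to be that same interval minus the image-pull window (lastFinishedPulling minus firstStartedPulling), consistent with the pod-startup SLI convention of excluding pull time. Pods that pulled nothing report the zero time 0001-01-01 and identical values for both. A quick check of the rmcf7 record using its monotonic m=+ offsets (the relation is inferred from the numbers, not read from kubelet source):

    package main

    import "fmt"

    func main() {
        // Values copied from the redhat-marketplace-rmcf7 record above.
        e2e := 3.407496485                    // watchObservedRunningTime - podCreationTimestamp
        pull := 331.508882380 - 330.018610551 // lastFinishedPulling - firstStartedPulling (m=+ offsets)
        fmt.Printf("pull=%.9f slo=%.9f\n", pull, e2e-pull)
        // Prints pull=1.490271829 slo=1.917224656, matching podStartSLOduration above.
    }

The crjv9 record below obeys the same identity: 4.441971506 - (334.806146137 - 332.048225647) = 1.684051016.
==== end annotation ====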
Nov 21 14:09:04 crc kubenswrapper[4774]: I1121 14:09:04.441996 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-crjv9" podStartSLOduration=1.684051016 podStartE2EDuration="4.441971506s" podCreationTimestamp="2025-11-21 14:09:00 +0000 UTC" firstStartedPulling="2025-11-21 14:09:01.396026398 +0000 UTC m=+332.048225647" lastFinishedPulling="2025-11-21 14:09:04.153946878 +0000 UTC m=+334.806146137" observedRunningTime="2025-11-21 14:09:04.439986287 +0000 UTC m=+335.092185536" watchObservedRunningTime="2025-11-21 14:09:04.441971506 +0000 UTC m=+335.094170765" Nov 21 14:09:05 crc kubenswrapper[4774]: I1121 14:09:05.434240 4774 generic.go:334] "Generic (PLEG): container finished" podID="a661380a-1987-4c11-a202-448d6fca796e" containerID="a33d9119ed0e1f798518a8dfcb99fd849d8ea5466913fe8940b89c55af04d547" exitCode=0 Nov 21 14:09:05 crc kubenswrapper[4774]: I1121 14:09:05.434380 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-stwtl" event={"ID":"a661380a-1987-4c11-a202-448d6fca796e","Type":"ContainerDied","Data":"a33d9119ed0e1f798518a8dfcb99fd849d8ea5466913fe8940b89c55af04d547"} Nov 21 14:09:06 crc kubenswrapper[4774]: I1121 14:09:06.441728 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-stwtl" event={"ID":"a661380a-1987-4c11-a202-448d6fca796e","Type":"ContainerStarted","Data":"cee963cf8daeaa1811301b2125019b4c0f753354a2afc4e6e201947e46d37f1f"} Nov 21 14:09:06 crc kubenswrapper[4774]: I1121 14:09:06.461918 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-stwtl" podStartSLOduration=2.827046536 podStartE2EDuration="6.461902186s" podCreationTimestamp="2025-11-21 14:09:00 +0000 UTC" firstStartedPulling="2025-11-21 14:09:02.403370717 +0000 UTC m=+333.055569976" lastFinishedPulling="2025-11-21 14:09:06.038226367 +0000 UTC m=+336.690425626" observedRunningTime="2025-11-21 14:09:06.461279778 +0000 UTC m=+337.113479047" watchObservedRunningTime="2025-11-21 14:09:06.461902186 +0000 UTC m=+337.114101445" Nov 21 14:09:08 crc kubenswrapper[4774]: I1121 14:09:08.523045 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:09:08 crc kubenswrapper[4774]: I1121 14:09:08.523679 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:09:08 crc kubenswrapper[4774]: I1121 14:09:08.577452 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:09:08 crc kubenswrapper[4774]: I1121 14:09:08.725939 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:09:08 crc kubenswrapper[4774]: I1121 14:09:08.726173 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:09:08 crc kubenswrapper[4774]: I1121 14:09:08.775564 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:09:09 crc kubenswrapper[4774]: I1121 14:09:09.497234 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rmcf7" Nov 21 14:09:09 crc kubenswrapper[4774]: I1121 14:09:09.498348 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/certified-operators-97n98" Nov 21 14:09:10 crc kubenswrapper[4774]: I1121 14:09:10.921944 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:10 crc kubenswrapper[4774]: I1121 14:09:10.922320 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:10 crc kubenswrapper[4774]: I1121 14:09:10.960357 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:11 crc kubenswrapper[4774]: I1121 14:09:11.132012 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:11 crc kubenswrapper[4774]: I1121 14:09:11.132064 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:11 crc kubenswrapper[4774]: I1121 14:09:11.174861 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:11 crc kubenswrapper[4774]: I1121 14:09:11.509355 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-crjv9" Nov 21 14:09:11 crc kubenswrapper[4774]: I1121 14:09:11.513221 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-stwtl" Nov 21 14:09:29 crc kubenswrapper[4774]: I1121 14:09:29.601332 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:09:29 crc kubenswrapper[4774]: I1121 14:09:29.601955 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:09:59 crc kubenswrapper[4774]: I1121 14:09:59.601406 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:09:59 crc kubenswrapper[4774]: I1121 14:09:59.602211 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:10:29 crc kubenswrapper[4774]: I1121 14:10:29.600867 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:10:29 crc kubenswrapper[4774]: I1121 14:10:29.601533 4774 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:10:29 crc kubenswrapper[4774]: I1121 14:10:29.601595 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:10:29 crc kubenswrapper[4774]: I1121 14:10:29.602359 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2595c16f9eff7964aa17f69ce9e0ff010fda0dfd486f4645209c9e40e8a6db69"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 14:10:29 crc kubenswrapper[4774]: I1121 14:10:29.602450 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://2595c16f9eff7964aa17f69ce9e0ff010fda0dfd486f4645209c9e40e8a6db69" gracePeriod=600 Nov 21 14:10:29 crc kubenswrapper[4774]: I1121 14:10:29.916128 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="2595c16f9eff7964aa17f69ce9e0ff010fda0dfd486f4645209c9e40e8a6db69" exitCode=0 Nov 21 14:10:29 crc kubenswrapper[4774]: I1121 14:10:29.916216 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"2595c16f9eff7964aa17f69ce9e0ff010fda0dfd486f4645209c9e40e8a6db69"} Nov 21 14:10:29 crc kubenswrapper[4774]: I1121 14:10:29.916635 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"5c293a6fa454f16330a12afe7778312d3179ec19bd0774a3474cca046515ae32"} Nov 21 14:10:29 crc kubenswrapper[4774]: I1121 14:10:29.916662 4774 scope.go:117] "RemoveContainer" containerID="bde291e2b72d0e766da69ef94f88e9f0c36ec94d56f16cd446a55bc668c5ce16" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.218509 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-whzxd"] Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.219732 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.236613 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-whzxd"] Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.380637 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-registry-tls\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.380705 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.380751 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-bound-sa-token\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.380768 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb2kg\" (UniqueName: \"kubernetes.io/projected/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-kube-api-access-rb2kg\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.380788 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-ca-trust-extracted\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.380802 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-installation-pull-secrets\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.380843 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-registry-certificates\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.380861 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-trusted-ca\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.399494 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.482557 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-bound-sa-token\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.482608 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb2kg\" (UniqueName: \"kubernetes.io/projected/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-kube-api-access-rb2kg\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.482634 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-ca-trust-extracted\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.482649 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-installation-pull-secrets\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.482680 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-registry-certificates\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.482701 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-trusted-ca\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.482742 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-registry-tls\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.483732 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-ca-trust-extracted\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.484735 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-registry-certificates\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.484941 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-trusted-ca\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.488957 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-registry-tls\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.489337 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-installation-pull-secrets\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.499762 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-bound-sa-token\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.501265 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rb2kg\" (UniqueName: \"kubernetes.io/projected/4b00eedc-b70b-4d5d-bec8-b2c5222bdeee-kube-api-access-rb2kg\") pod \"image-registry-66df7c8f76-whzxd\" (UID: \"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee\") " pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.537529 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:37 crc kubenswrapper[4774]: I1121 14:11:37.741088 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-whzxd"] Nov 21 14:11:38 crc kubenswrapper[4774]: I1121 14:11:38.460048 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" event={"ID":"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee","Type":"ContainerStarted","Data":"57644146ec913f511b3825f5476f038fce42519b7f3e4edf7800b2d9025d31ba"} Nov 21 14:11:38 crc kubenswrapper[4774]: I1121 14:11:38.460141 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" event={"ID":"4b00eedc-b70b-4d5d-bec8-b2c5222bdeee","Type":"ContainerStarted","Data":"d7e5e66f99b5530b9e38efe44d12605c90eb149d922ce61c0bc99167325b9084"} Nov 21 14:11:38 crc kubenswrapper[4774]: I1121 14:11:38.460237 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:38 crc kubenswrapper[4774]: I1121 14:11:38.483424 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" podStartSLOduration=1.4834001159999999 podStartE2EDuration="1.483400116s" podCreationTimestamp="2025-11-21 14:11:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:11:38.482441267 +0000 UTC m=+489.134640546" watchObservedRunningTime="2025-11-21 14:11:38.483400116 +0000 UTC m=+489.135599375" Nov 21 14:11:57 crc kubenswrapper[4774]: I1121 14:11:57.812129 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-whzxd" Nov 21 14:11:57 crc kubenswrapper[4774]: I1121 14:11:57.870531 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pgwc4"] Nov 21 14:12:22 crc kubenswrapper[4774]: I1121 14:12:22.931588 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" podUID="f1760383-3b9d-4c38-b474-75ec72a82819" containerName="registry" containerID="cri-o://a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd" gracePeriod=30 Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.280594 4774 util.go:48] "No ready sandbox for pod can be found. 
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.420270 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f1760383-3b9d-4c38-b474-75ec72a82819-installation-pull-secrets\") pod \"f1760383-3b9d-4c38-b474-75ec72a82819\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") "
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.420371 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-bound-sa-token\") pod \"f1760383-3b9d-4c38-b474-75ec72a82819\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") "
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.420443 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f1760383-3b9d-4c38-b474-75ec72a82819-ca-trust-extracted\") pod \"f1760383-3b9d-4c38-b474-75ec72a82819\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") "
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.420487 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f1760383-3b9d-4c38-b474-75ec72a82819-trusted-ca\") pod \"f1760383-3b9d-4c38-b474-75ec72a82819\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") "
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.420527 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-registry-tls\") pod \"f1760383-3b9d-4c38-b474-75ec72a82819\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") "
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.420564 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pm5dv\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-kube-api-access-pm5dv\") pod \"f1760383-3b9d-4c38-b474-75ec72a82819\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") "
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.420652 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f1760383-3b9d-4c38-b474-75ec72a82819-registry-certificates\") pod \"f1760383-3b9d-4c38-b474-75ec72a82819\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") "
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.420886 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"f1760383-3b9d-4c38-b474-75ec72a82819\" (UID: \"f1760383-3b9d-4c38-b474-75ec72a82819\") "
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.422245 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1760383-3b9d-4c38-b474-75ec72a82819-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "f1760383-3b9d-4c38-b474-75ec72a82819" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.422356 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1760383-3b9d-4c38-b474-75ec72a82819-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "f1760383-3b9d-4c38-b474-75ec72a82819" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.430225 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1760383-3b9d-4c38-b474-75ec72a82819-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "f1760383-3b9d-4c38-b474-75ec72a82819" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.430325 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-kube-api-access-pm5dv" (OuterVolumeSpecName: "kube-api-access-pm5dv") pod "f1760383-3b9d-4c38-b474-75ec72a82819" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819"). InnerVolumeSpecName "kube-api-access-pm5dv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.431177 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "f1760383-3b9d-4c38-b474-75ec72a82819" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.431373 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "f1760383-3b9d-4c38-b474-75ec72a82819" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.436702 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "f1760383-3b9d-4c38-b474-75ec72a82819" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.446107 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1760383-3b9d-4c38-b474-75ec72a82819-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "f1760383-3b9d-4c38-b474-75ec72a82819" (UID: "f1760383-3b9d-4c38-b474-75ec72a82819"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.522486 4774 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/f1760383-3b9d-4c38-b474-75ec72a82819-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.522530 4774 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-bound-sa-token\") on node \"crc\" DevicePath \"\""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.522541 4774 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/f1760383-3b9d-4c38-b474-75ec72a82819-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.522550 4774 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f1760383-3b9d-4c38-b474-75ec72a82819-trusted-ca\") on node \"crc\" DevicePath \"\""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.522560 4774 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-registry-tls\") on node \"crc\" DevicePath \"\""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.522569 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pm5dv\" (UniqueName: \"kubernetes.io/projected/f1760383-3b9d-4c38-b474-75ec72a82819-kube-api-access-pm5dv\") on node \"crc\" DevicePath \"\""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.522579 4774 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/f1760383-3b9d-4c38-b474-75ec72a82819-registry-certificates\") on node \"crc\" DevicePath \"\""
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.989030 4774 generic.go:334] "Generic (PLEG): container finished" podID="f1760383-3b9d-4c38-b474-75ec72a82819" containerID="a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd" exitCode=0
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.989107 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4"
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.989109 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" event={"ID":"f1760383-3b9d-4c38-b474-75ec72a82819","Type":"ContainerDied","Data":"a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd"}
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.989260 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pgwc4" event={"ID":"f1760383-3b9d-4c38-b474-75ec72a82819","Type":"ContainerDied","Data":"aa3d40005519f42c657889685eed5a3779ef270b3ad0911876b637f707b100ad"}
Nov 21 14:12:23 crc kubenswrapper[4774]: I1121 14:12:23.989304 4774 scope.go:117] "RemoveContainer" containerID="a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd"
Nov 21 14:12:24 crc kubenswrapper[4774]: I1121 14:12:24.011450 4774 scope.go:117] "RemoveContainer" containerID="a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd"
Nov 21 14:12:24 crc kubenswrapper[4774]: E1121 14:12:24.012401 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd\": container with ID starting with a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd not found: ID does not exist" containerID="a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd"
Nov 21 14:12:24 crc kubenswrapper[4774]: I1121 14:12:24.012442 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd"} err="failed to get container status \"a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd\": rpc error: code = NotFound desc = could not find container \"a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd\": container with ID starting with a858cd77eaf1d4cc522706e987118f9bf5efa68b7dac8212d83cd38a94fcfccd not found: ID does not exist"
Nov 21 14:12:24 crc kubenswrapper[4774]: I1121 14:12:24.020301 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pgwc4"]
Nov 21 14:12:24 crc kubenswrapper[4774]: I1121 14:12:24.030878 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pgwc4"]
Nov 21 14:12:24 crc kubenswrapper[4774]: I1121 14:12:24.100851 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1760383-3b9d-4c38-b474-75ec72a82819" path="/var/lib/kubelet/pods/f1760383-3b9d-4c38-b474-75ec72a82819/volumes"
Nov 21 14:12:29 crc kubenswrapper[4774]: I1121 14:12:29.601386 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 14:12:29 crc kubenswrapper[4774]: I1121 14:12:29.602345 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 14:12:59 crc kubenswrapper[4774]: I1121 14:12:59.600947 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 14:12:59 crc kubenswrapper[4774]: I1121 14:12:59.601528 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 14:13:29 crc kubenswrapper[4774]: I1121 14:13:29.601146 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 14:13:29 crc kubenswrapper[4774]: I1121 14:13:29.602098 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 14:13:29 crc kubenswrapper[4774]: I1121 14:13:29.602188 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb"
Nov 21 14:13:29 crc kubenswrapper[4774]: I1121 14:13:29.603018 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5c293a6fa454f16330a12afe7778312d3179ec19bd0774a3474cca046515ae32"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 21 14:13:29 crc kubenswrapper[4774]: I1121 14:13:29.603087 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://5c293a6fa454f16330a12afe7778312d3179ec19bd0774a3474cca046515ae32" gracePeriod=600
Nov 21 14:13:30 crc kubenswrapper[4774]: I1121 14:13:30.381174 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="5c293a6fa454f16330a12afe7778312d3179ec19bd0774a3474cca046515ae32" exitCode=0
Nov 21 14:13:30 crc kubenswrapper[4774]: I1121 14:13:30.381234 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"5c293a6fa454f16330a12afe7778312d3179ec19bd0774a3474cca046515ae32"}
Nov 21 14:13:30 crc kubenswrapper[4774]: I1121 14:13:30.381840 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"bb913418006b884e0ea3b932253e39752b8814882b052669be5898cc2a7736b4"}
Nov 21 14:13:30 crc kubenswrapper[4774]: I1121 14:13:30.381865 4774 scope.go:117] "RemoveContainer" containerID="2595c16f9eff7964aa17f69ce9e0ff010fda0dfd486f4645209c9e40e8a6db69"
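The machine-config-daemon sequence above shows the kubelet's liveness-probe machinery end to end: the probe GETs http://127.0.0.1:8798/health, three consecutive failures thirty seconds apart (14:12:29, 14:12:59, 14:13:29) mark the container unhealthy, and the kubelet kills it with its termination grace period (600s here) and starts a replacement. The failure spacing and count suggest a probe shaped roughly like the Go sketch below, using the k8s.io/api core/v1 types; the path and port come from the logged URL, but periodSeconds and failureThreshold are inferred, and the real machine-config-daemon manifest may differ:

    package main

    import (
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    	"k8s.io/apimachinery/pkg/util/intstr"
    )

    func main() {
    	// Hypothetical probe matching the logged endpoint and failure cadence.
    	probe := corev1.Probe{
    		ProbeHandler: corev1.ProbeHandler{
    			HTTPGet: &corev1.HTTPGetAction{
    				Path: "/health",            // from the logged URL
    				Port: intstr.FromInt(8798), // from the logged URL
    			},
    		},
    		PeriodSeconds:    30, // inferred from the 14:12:29 / 14:12:59 / 14:13:29 spacing
    		FailureThreshold: 3,  // inferred: the restart followed the third failure
    	}
    	fmt.Printf("%+v\n", probe)
    }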
containerID="2595c16f9eff7964aa17f69ce9e0ff010fda0dfd486f4645209c9e40e8a6db69" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.143904 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9"] Nov 21 14:15:00 crc kubenswrapper[4774]: E1121 14:15:00.145791 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1760383-3b9d-4c38-b474-75ec72a82819" containerName="registry" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.145833 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1760383-3b9d-4c38-b474-75ec72a82819" containerName="registry" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.146432 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1760383-3b9d-4c38-b474-75ec72a82819" containerName="registry" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.147518 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.158983 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.159295 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.168061 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9"] Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.226856 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ed90be29-4d9f-46c7-b158-074488aad60e-secret-volume\") pod \"collect-profiles-29395575-hxvk9\" (UID: \"ed90be29-4d9f-46c7-b158-074488aad60e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.227553 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ed90be29-4d9f-46c7-b158-074488aad60e-config-volume\") pod \"collect-profiles-29395575-hxvk9\" (UID: \"ed90be29-4d9f-46c7-b158-074488aad60e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.227983 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvvq4\" (UniqueName: \"kubernetes.io/projected/ed90be29-4d9f-46c7-b158-074488aad60e-kube-api-access-qvvq4\") pod \"collect-profiles-29395575-hxvk9\" (UID: \"ed90be29-4d9f-46c7-b158-074488aad60e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.329211 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ed90be29-4d9f-46c7-b158-074488aad60e-secret-volume\") pod \"collect-profiles-29395575-hxvk9\" (UID: \"ed90be29-4d9f-46c7-b158-074488aad60e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.329321 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/ed90be29-4d9f-46c7-b158-074488aad60e-config-volume\") pod \"collect-profiles-29395575-hxvk9\" (UID: \"ed90be29-4d9f-46c7-b158-074488aad60e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.329405 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvvq4\" (UniqueName: \"kubernetes.io/projected/ed90be29-4d9f-46c7-b158-074488aad60e-kube-api-access-qvvq4\") pod \"collect-profiles-29395575-hxvk9\" (UID: \"ed90be29-4d9f-46c7-b158-074488aad60e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.330752 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ed90be29-4d9f-46c7-b158-074488aad60e-config-volume\") pod \"collect-profiles-29395575-hxvk9\" (UID: \"ed90be29-4d9f-46c7-b158-074488aad60e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.338763 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ed90be29-4d9f-46c7-b158-074488aad60e-secret-volume\") pod \"collect-profiles-29395575-hxvk9\" (UID: \"ed90be29-4d9f-46c7-b158-074488aad60e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.351873 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvvq4\" (UniqueName: \"kubernetes.io/projected/ed90be29-4d9f-46c7-b158-074488aad60e-kube-api-access-qvvq4\") pod \"collect-profiles-29395575-hxvk9\" (UID: \"ed90be29-4d9f-46c7-b158-074488aad60e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.478064 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.681901 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9"] Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.905798 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" event={"ID":"ed90be29-4d9f-46c7-b158-074488aad60e","Type":"ContainerStarted","Data":"2354d724114d4e9bac4893889c3264e28535e3216f96885003a0c5b26ad1d7df"} Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.905882 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" event={"ID":"ed90be29-4d9f-46c7-b158-074488aad60e","Type":"ContainerStarted","Data":"bc31c61929433a2b730395f5bb26df67356ab6279bd2abc80cab7243de99c0c5"} Nov 21 14:15:00 crc kubenswrapper[4774]: I1121 14:15:00.924322 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" podStartSLOduration=0.924295 podStartE2EDuration="924.295ms" podCreationTimestamp="2025-11-21 14:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:15:00.922364871 +0000 UTC m=+691.574564140" watchObservedRunningTime="2025-11-21 14:15:00.924295 +0000 UTC m=+691.576494259" Nov 21 14:15:01 crc kubenswrapper[4774]: I1121 14:15:01.914053 4774 generic.go:334] "Generic (PLEG): container finished" podID="ed90be29-4d9f-46c7-b158-074488aad60e" containerID="2354d724114d4e9bac4893889c3264e28535e3216f96885003a0c5b26ad1d7df" exitCode=0 Nov 21 14:15:01 crc kubenswrapper[4774]: I1121 14:15:01.914123 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" event={"ID":"ed90be29-4d9f-46c7-b158-074488aad60e","Type":"ContainerDied","Data":"2354d724114d4e9bac4893889c3264e28535e3216f96885003a0c5b26ad1d7df"} Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.133359 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.270458 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ed90be29-4d9f-46c7-b158-074488aad60e-secret-volume\") pod \"ed90be29-4d9f-46c7-b158-074488aad60e\" (UID: \"ed90be29-4d9f-46c7-b158-074488aad60e\") " Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.270546 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ed90be29-4d9f-46c7-b158-074488aad60e-config-volume\") pod \"ed90be29-4d9f-46c7-b158-074488aad60e\" (UID: \"ed90be29-4d9f-46c7-b158-074488aad60e\") " Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.270606 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvvq4\" (UniqueName: \"kubernetes.io/projected/ed90be29-4d9f-46c7-b158-074488aad60e-kube-api-access-qvvq4\") pod \"ed90be29-4d9f-46c7-b158-074488aad60e\" (UID: \"ed90be29-4d9f-46c7-b158-074488aad60e\") " Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.271537 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed90be29-4d9f-46c7-b158-074488aad60e-config-volume" (OuterVolumeSpecName: "config-volume") pod "ed90be29-4d9f-46c7-b158-074488aad60e" (UID: "ed90be29-4d9f-46c7-b158-074488aad60e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.277015 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed90be29-4d9f-46c7-b158-074488aad60e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ed90be29-4d9f-46c7-b158-074488aad60e" (UID: "ed90be29-4d9f-46c7-b158-074488aad60e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.277042 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed90be29-4d9f-46c7-b158-074488aad60e-kube-api-access-qvvq4" (OuterVolumeSpecName: "kube-api-access-qvvq4") pod "ed90be29-4d9f-46c7-b158-074488aad60e" (UID: "ed90be29-4d9f-46c7-b158-074488aad60e"). InnerVolumeSpecName "kube-api-access-qvvq4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.372122 4774 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ed90be29-4d9f-46c7-b158-074488aad60e-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.372160 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ed90be29-4d9f-46c7-b158-074488aad60e-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.372173 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvvq4\" (UniqueName: \"kubernetes.io/projected/ed90be29-4d9f-46c7-b158-074488aad60e-kube-api-access-qvvq4\") on node \"crc\" DevicePath \"\"" Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.927800 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" event={"ID":"ed90be29-4d9f-46c7-b158-074488aad60e","Type":"ContainerDied","Data":"bc31c61929433a2b730395f5bb26df67356ab6279bd2abc80cab7243de99c0c5"} Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.927863 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9" Nov 21 14:15:03 crc kubenswrapper[4774]: I1121 14:15:03.927881 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc31c61929433a2b730395f5bb26df67356ab6279bd2abc80cab7243de99c0c5" Nov 21 14:15:29 crc kubenswrapper[4774]: I1121 14:15:29.601541 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:15:29 crc kubenswrapper[4774]: I1121 14:15:29.602211 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:15:59 crc kubenswrapper[4774]: I1121 14:15:59.601163 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:15:59 crc kubenswrapper[4774]: I1121 14:15:59.602094 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.028463 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-d965l"] Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.029575 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" 
podUID="516ab72d-be26-41a3-8f34-2fce0bf4febb" containerName="controller-manager" containerID="cri-o://4e74fe91c6ab54033fd8833458567569d29f4b2c5b73edf0a016dea1912cd6e7" gracePeriod=30 Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.134296 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp"] Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.134769 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" podUID="223a69f9-6da6-49f6-8dc6-791fdb76a205" containerName="route-controller-manager" containerID="cri-o://a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36" gracePeriod=30 Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.261661 4774 generic.go:334] "Generic (PLEG): container finished" podID="516ab72d-be26-41a3-8f34-2fce0bf4febb" containerID="4e74fe91c6ab54033fd8833458567569d29f4b2c5b73edf0a016dea1912cd6e7" exitCode=0 Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.261976 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" event={"ID":"516ab72d-be26-41a3-8f34-2fce0bf4febb","Type":"ContainerDied","Data":"4e74fe91c6ab54033fd8833458567569d29f4b2c5b73edf0a016dea1912cd6e7"} Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.590961 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.591150 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.663654 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-69d6955dc-v9g4g"] Nov 21 14:16:01 crc kubenswrapper[4774]: E1121 14:16:01.663931 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed90be29-4d9f-46c7-b158-074488aad60e" containerName="collect-profiles" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.663948 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed90be29-4d9f-46c7-b158-074488aad60e" containerName="collect-profiles" Nov 21 14:16:01 crc kubenswrapper[4774]: E1121 14:16:01.663967 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="223a69f9-6da6-49f6-8dc6-791fdb76a205" containerName="route-controller-manager" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.663976 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="223a69f9-6da6-49f6-8dc6-791fdb76a205" containerName="route-controller-manager" Nov 21 14:16:01 crc kubenswrapper[4774]: E1121 14:16:01.663998 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="516ab72d-be26-41a3-8f34-2fce0bf4febb" containerName="controller-manager" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.664007 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="516ab72d-be26-41a3-8f34-2fce0bf4febb" containerName="controller-manager" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.664150 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="223a69f9-6da6-49f6-8dc6-791fdb76a205" containerName="route-controller-manager" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.664176 4774 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="516ab72d-be26-41a3-8f34-2fce0bf4febb" containerName="controller-manager" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.664189 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed90be29-4d9f-46c7-b158-074488aad60e" containerName="collect-profiles" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.664655 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.693627 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-69d6955dc-v9g4g"] Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.699593 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57"] Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.700549 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.707509 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57"] Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.743178 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/223a69f9-6da6-49f6-8dc6-791fdb76a205-client-ca\") pod \"223a69f9-6da6-49f6-8dc6-791fdb76a205\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.743290 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-client-ca\") pod \"516ab72d-be26-41a3-8f34-2fce0bf4febb\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.743336 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/223a69f9-6da6-49f6-8dc6-791fdb76a205-config\") pod \"223a69f9-6da6-49f6-8dc6-791fdb76a205\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.743373 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghdt4\" (UniqueName: \"kubernetes.io/projected/223a69f9-6da6-49f6-8dc6-791fdb76a205-kube-api-access-ghdt4\") pod \"223a69f9-6da6-49f6-8dc6-791fdb76a205\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.743411 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/223a69f9-6da6-49f6-8dc6-791fdb76a205-serving-cert\") pod \"223a69f9-6da6-49f6-8dc6-791fdb76a205\" (UID: \"223a69f9-6da6-49f6-8dc6-791fdb76a205\") " Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.743457 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-config\") pod \"516ab72d-be26-41a3-8f34-2fce0bf4febb\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.743506 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-proxy-ca-bundles\") pod \"516ab72d-be26-41a3-8f34-2fce0bf4febb\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.743540 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/516ab72d-be26-41a3-8f34-2fce0bf4febb-serving-cert\") pod \"516ab72d-be26-41a3-8f34-2fce0bf4febb\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.743627 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbzhp\" (UniqueName: \"kubernetes.io/projected/516ab72d-be26-41a3-8f34-2fce0bf4febb-kube-api-access-zbzhp\") pod \"516ab72d-be26-41a3-8f34-2fce0bf4febb\" (UID: \"516ab72d-be26-41a3-8f34-2fce0bf4febb\") " Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.745428 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/223a69f9-6da6-49f6-8dc6-791fdb76a205-client-ca" (OuterVolumeSpecName: "client-ca") pod "223a69f9-6da6-49f6-8dc6-791fdb76a205" (UID: "223a69f9-6da6-49f6-8dc6-791fdb76a205"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.746617 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "516ab72d-be26-41a3-8f34-2fce0bf4febb" (UID: "516ab72d-be26-41a3-8f34-2fce0bf4febb"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.746629 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/223a69f9-6da6-49f6-8dc6-791fdb76a205-config" (OuterVolumeSpecName: "config") pod "223a69f9-6da6-49f6-8dc6-791fdb76a205" (UID: "223a69f9-6da6-49f6-8dc6-791fdb76a205"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.746933 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-config" (OuterVolumeSpecName: "config") pod "516ab72d-be26-41a3-8f34-2fce0bf4febb" (UID: "516ab72d-be26-41a3-8f34-2fce0bf4febb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.747242 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-client-ca" (OuterVolumeSpecName: "client-ca") pod "516ab72d-be26-41a3-8f34-2fce0bf4febb" (UID: "516ab72d-be26-41a3-8f34-2fce0bf4febb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.754603 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/223a69f9-6da6-49f6-8dc6-791fdb76a205-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "223a69f9-6da6-49f6-8dc6-791fdb76a205" (UID: "223a69f9-6da6-49f6-8dc6-791fdb76a205"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.754740 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/516ab72d-be26-41a3-8f34-2fce0bf4febb-kube-api-access-zbzhp" (OuterVolumeSpecName: "kube-api-access-zbzhp") pod "516ab72d-be26-41a3-8f34-2fce0bf4febb" (UID: "516ab72d-be26-41a3-8f34-2fce0bf4febb"). InnerVolumeSpecName "kube-api-access-zbzhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.754781 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/223a69f9-6da6-49f6-8dc6-791fdb76a205-kube-api-access-ghdt4" (OuterVolumeSpecName: "kube-api-access-ghdt4") pod "223a69f9-6da6-49f6-8dc6-791fdb76a205" (UID: "223a69f9-6da6-49f6-8dc6-791fdb76a205"). InnerVolumeSpecName "kube-api-access-ghdt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.754757 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/516ab72d-be26-41a3-8f34-2fce0bf4febb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "516ab72d-be26-41a3-8f34-2fce0bf4febb" (UID: "516ab72d-be26-41a3-8f34-2fce0bf4febb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.845285 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dd531fc3-698c-405e-a687-153869b86ac1-client-ca\") pod \"route-controller-manager-7f9d9df89-hxm57\" (UID: \"dd531fc3-698c-405e-a687-153869b86ac1\") " pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.845374 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd531fc3-698c-405e-a687-153869b86ac1-config\") pod \"route-controller-manager-7f9d9df89-hxm57\" (UID: \"dd531fc3-698c-405e-a687-153869b86ac1\") " pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.845510 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-client-ca\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.845679 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-proxy-ca-bundles\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.845736 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzvxr\" (UniqueName: \"kubernetes.io/projected/dd531fc3-698c-405e-a687-153869b86ac1-kube-api-access-hzvxr\") pod \"route-controller-manager-7f9d9df89-hxm57\" (UID: 
\"dd531fc3-698c-405e-a687-153869b86ac1\") " pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.845789 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-serving-cert\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.845864 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd531fc3-698c-405e-a687-153869b86ac1-serving-cert\") pod \"route-controller-manager-7f9d9df89-hxm57\" (UID: \"dd531fc3-698c-405e-a687-153869b86ac1\") " pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.846031 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnqfv\" (UniqueName: \"kubernetes.io/projected/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-kube-api-access-xnqfv\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.846072 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-config\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.846131 4774 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/223a69f9-6da6-49f6-8dc6-791fdb76a205-client-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.846149 4774 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-client-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.846163 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/223a69f9-6da6-49f6-8dc6-791fdb76a205-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.846174 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghdt4\" (UniqueName: \"kubernetes.io/projected/223a69f9-6da6-49f6-8dc6-791fdb76a205-kube-api-access-ghdt4\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.846183 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/223a69f9-6da6-49f6-8dc6-791fdb76a205-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.846193 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.846203 4774 reconciler_common.go:293] "Volume 
detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/516ab72d-be26-41a3-8f34-2fce0bf4febb-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.846212 4774 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/516ab72d-be26-41a3-8f34-2fce0bf4febb-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.846224 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbzhp\" (UniqueName: \"kubernetes.io/projected/516ab72d-be26-41a3-8f34-2fce0bf4febb-kube-api-access-zbzhp\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.947194 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnqfv\" (UniqueName: \"kubernetes.io/projected/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-kube-api-access-xnqfv\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.947776 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-config\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.947853 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dd531fc3-698c-405e-a687-153869b86ac1-client-ca\") pod \"route-controller-manager-7f9d9df89-hxm57\" (UID: \"dd531fc3-698c-405e-a687-153869b86ac1\") " pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.947898 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd531fc3-698c-405e-a687-153869b86ac1-config\") pod \"route-controller-manager-7f9d9df89-hxm57\" (UID: \"dd531fc3-698c-405e-a687-153869b86ac1\") " pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.947922 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-client-ca\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.947948 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-proxy-ca-bundles\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.948162 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzvxr\" (UniqueName: \"kubernetes.io/projected/dd531fc3-698c-405e-a687-153869b86ac1-kube-api-access-hzvxr\") pod \"route-controller-manager-7f9d9df89-hxm57\" 
(UID: \"dd531fc3-698c-405e-a687-153869b86ac1\") " pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.948190 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-serving-cert\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.948216 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd531fc3-698c-405e-a687-153869b86ac1-serving-cert\") pod \"route-controller-manager-7f9d9df89-hxm57\" (UID: \"dd531fc3-698c-405e-a687-153869b86ac1\") " pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.949151 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/dd531fc3-698c-405e-a687-153869b86ac1-client-ca\") pod \"route-controller-manager-7f9d9df89-hxm57\" (UID: \"dd531fc3-698c-405e-a687-153869b86ac1\") " pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.949351 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd531fc3-698c-405e-a687-153869b86ac1-config\") pod \"route-controller-manager-7f9d9df89-hxm57\" (UID: \"dd531fc3-698c-405e-a687-153869b86ac1\") " pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.949397 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-client-ca\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.949890 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-config\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.949933 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-proxy-ca-bundles\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.952523 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd531fc3-698c-405e-a687-153869b86ac1-serving-cert\") pod \"route-controller-manager-7f9d9df89-hxm57\" (UID: \"dd531fc3-698c-405e-a687-153869b86ac1\") " pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.952911 4774 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-serving-cert\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.966261 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnqfv\" (UniqueName: \"kubernetes.io/projected/2e62b2a3-5fe7-4a60-b05b-9e122154dbc4-kube-api-access-xnqfv\") pod \"controller-manager-69d6955dc-v9g4g\" (UID: \"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4\") " pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.966739 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzvxr\" (UniqueName: \"kubernetes.io/projected/dd531fc3-698c-405e-a687-153869b86ac1-kube-api-access-hzvxr\") pod \"route-controller-manager-7f9d9df89-hxm57\" (UID: \"dd531fc3-698c-405e-a687-153869b86ac1\") " pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:01 crc kubenswrapper[4774]: I1121 14:16:01.982775 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.019904 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.212587 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-69d6955dc-v9g4g"] Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.270337 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57"] Nov 21 14:16:02 crc kubenswrapper[4774]: W1121 14:16:02.277977 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd531fc3_698c_405e_a687_153869b86ac1.slice/crio-79e28f14c3274654992bd316d600bce312b7965727dea6d49646c6ede60b74c8 WatchSource:0}: Error finding container 79e28f14c3274654992bd316d600bce312b7965727dea6d49646c6ede60b74c8: Status 404 returned error can't find the container with id 79e28f14c3274654992bd316d600bce312b7965727dea6d49646c6ede60b74c8 Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.279962 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.280584 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-d965l" event={"ID":"516ab72d-be26-41a3-8f34-2fce0bf4febb","Type":"ContainerDied","Data":"fc254fa6ef3295a648da0c92099f748825cc9087b2b0d21ee4f287323397321a"} Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.280680 4774 scope.go:117] "RemoveContainer" containerID="4e74fe91c6ab54033fd8833458567569d29f4b2c5b73edf0a016dea1912cd6e7" Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.282734 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" event={"ID":"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4","Type":"ContainerStarted","Data":"77f602e2ace1e1903f10469dc9647d0c942bf32e866c55e9f714d9f2d25617c0"} Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.285747 4774 generic.go:334] "Generic (PLEG): container finished" podID="223a69f9-6da6-49f6-8dc6-791fdb76a205" containerID="a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36" exitCode=0 Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.285838 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.285861 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" event={"ID":"223a69f9-6da6-49f6-8dc6-791fdb76a205","Type":"ContainerDied","Data":"a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36"} Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.285899 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp" event={"ID":"223a69f9-6da6-49f6-8dc6-791fdb76a205","Type":"ContainerDied","Data":"5a859cbb874430ed40ff6c6adb091d23cf5a4087fff11e84f372d11af7eaa070"} Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.351249 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-d965l"] Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.361044 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-d965l"] Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.361395 4774 scope.go:117] "RemoveContainer" containerID="a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36" Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.374565 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp"] Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.376723 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-gtfqp"] Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.402676 4774 scope.go:117] "RemoveContainer" containerID="a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36" Nov 21 14:16:02 crc kubenswrapper[4774]: E1121 14:16:02.404114 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36\": container with ID 
starting with a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36 not found: ID does not exist" containerID="a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36" Nov 21 14:16:02 crc kubenswrapper[4774]: I1121 14:16:02.404224 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36"} err="failed to get container status \"a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36\": rpc error: code = NotFound desc = could not find container \"a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36\": container with ID starting with a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36 not found: ID does not exist" Nov 21 14:16:03 crc kubenswrapper[4774]: I1121 14:16:03.301444 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" event={"ID":"dd531fc3-698c-405e-a687-153869b86ac1","Type":"ContainerStarted","Data":"568125de0790e257d56f81ee43eb60621ef58a0dac3695490f88fdc06e17af3f"} Nov 21 14:16:03 crc kubenswrapper[4774]: I1121 14:16:03.301924 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:03 crc kubenswrapper[4774]: I1121 14:16:03.301950 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" event={"ID":"dd531fc3-698c-405e-a687-153869b86ac1","Type":"ContainerStarted","Data":"79e28f14c3274654992bd316d600bce312b7965727dea6d49646c6ede60b74c8"} Nov 21 14:16:03 crc kubenswrapper[4774]: I1121 14:16:03.307491 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" event={"ID":"2e62b2a3-5fe7-4a60-b05b-9e122154dbc4","Type":"ContainerStarted","Data":"ccc0f828714e9812f9383ec960f72cdff74d8f78b204fd63380496a0a2de4995"} Nov 21 14:16:03 crc kubenswrapper[4774]: I1121 14:16:03.307746 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:03 crc kubenswrapper[4774]: I1121 14:16:03.308436 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" Nov 21 14:16:03 crc kubenswrapper[4774]: I1121 14:16:03.313852 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" Nov 21 14:16:03 crc kubenswrapper[4774]: I1121 14:16:03.322072 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7f9d9df89-hxm57" podStartSLOduration=2.322053929 podStartE2EDuration="2.322053929s" podCreationTimestamp="2025-11-21 14:16:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:16:03.319250474 +0000 UTC m=+753.971449733" watchObservedRunningTime="2025-11-21 14:16:03.322053929 +0000 UTC m=+753.974253188" Nov 21 14:16:03 crc kubenswrapper[4774]: I1121 14:16:03.341573 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-69d6955dc-v9g4g" podStartSLOduration=2.341544208 podStartE2EDuration="2.341544208s" 
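[editor's note] The NotFound error above is benign: the kubelet's RemoveContainer call raced with CRI-O's own garbage collection, so the container was already gone. A minimal sketch (not the kubelet's actual code) of distinguishing that case from a real failure when a CRI-style gRPC call returns an error:

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer stands in for a CRI RemoveContainer call; the real kubelet
// goes through its CRI gRPC client, which returned codes.NotFound above.
func removeContainer(id string) error {
	return status.Errorf(codes.NotFound, "could not find container %q", id)
}

func main() {
	err := removeContainer("a74f6be03b9062f586714b63a68af9172248202ed0f440a1b28a23bfbf693d36")
	if st, ok := status.FromError(err); ok && st.Code() == codes.NotFound {
		// Already removed: the deletion raced with runtime GC, so treat
		// this as success rather than surfacing it as a failure.
		fmt.Println("already removed:", st.Message())
		return
	}
	if err != nil {
		fmt.Println("unexpected error:", err)
	}
}
```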
podCreationTimestamp="2025-11-21 14:16:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:16:03.340911579 +0000 UTC m=+753.993110848" watchObservedRunningTime="2025-11-21 14:16:03.341544208 +0000 UTC m=+753.993743467" Nov 21 14:16:04 crc kubenswrapper[4774]: I1121 14:16:04.099460 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="223a69f9-6da6-49f6-8dc6-791fdb76a205" path="/var/lib/kubelet/pods/223a69f9-6da6-49f6-8dc6-791fdb76a205/volumes" Nov 21 14:16:04 crc kubenswrapper[4774]: I1121 14:16:04.100102 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="516ab72d-be26-41a3-8f34-2fce0bf4febb" path="/var/lib/kubelet/pods/516ab72d-be26-41a3-8f34-2fce0bf4febb/volumes" Nov 21 14:16:11 crc kubenswrapper[4774]: I1121 14:16:11.330229 4774 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 21 14:16:29 crc kubenswrapper[4774]: I1121 14:16:29.601154 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:16:29 crc kubenswrapper[4774]: I1121 14:16:29.601719 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:16:29 crc kubenswrapper[4774]: I1121 14:16:29.601764 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:16:29 crc kubenswrapper[4774]: I1121 14:16:29.602366 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bb913418006b884e0ea3b932253e39752b8814882b052669be5898cc2a7736b4"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 14:16:29 crc kubenswrapper[4774]: I1121 14:16:29.602425 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://bb913418006b884e0ea3b932253e39752b8814882b052669be5898cc2a7736b4" gracePeriod=600 Nov 21 14:16:30 crc kubenswrapper[4774]: I1121 14:16:30.463315 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="bb913418006b884e0ea3b932253e39752b8814882b052669be5898cc2a7736b4" exitCode=0 Nov 21 14:16:30 crc kubenswrapper[4774]: I1121 14:16:30.463403 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"bb913418006b884e0ea3b932253e39752b8814882b052669be5898cc2a7736b4"} Nov 21 14:16:30 crc kubenswrapper[4774]: I1121 14:16:30.463954 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"60358f57ea897b7d0cc072aaadbd84c8627ffc28289a543329a6b20ec347a65d"} Nov 21 14:16:30 crc kubenswrapper[4774]: I1121 14:16:30.464000 4774 scope.go:117] "RemoveContainer" containerID="5c293a6fa454f16330a12afe7778312d3179ec19bd0774a3474cca046515ae32" Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.977120 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rltf4"] Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.978370 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovn-controller" containerID="cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd" gracePeriod=30 Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.978862 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="sbdb" containerID="cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59" gracePeriod=30 Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.978923 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="nbdb" containerID="cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d" gracePeriod=30 Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.978956 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="northd" containerID="cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8" gracePeriod=30 Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.978983 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be" gracePeriod=30 Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.979017 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="kube-rbac-proxy-node" containerID="cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af" gracePeriod=30 Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.979052 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovn-acl-logging" containerID="cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3" gracePeriod=30 Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.017793 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller" containerID="cri-o://4311dc642eeb3b7afa452271bbe04109b434f64f9a16c57967c8f8c498472c42" gracePeriod=30 Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 
Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.977120 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rltf4"]
Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.978370 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovn-controller" containerID="cri-o://ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd" gracePeriod=30
Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.978862 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="sbdb" containerID="cri-o://37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59" gracePeriod=30
Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.978923 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="nbdb" containerID="cri-o://c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d" gracePeriod=30
Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.978956 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="northd" containerID="cri-o://a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8" gracePeriod=30
Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.978983 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be" gracePeriod=30
Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.979017 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="kube-rbac-proxy-node" containerID="cri-o://64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af" gracePeriod=30
Nov 21 14:16:39 crc kubenswrapper[4774]: I1121 14:16:39.979052 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovn-acl-logging" containerID="cri-o://0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3" gracePeriod=30
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.017793 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller" containerID="cri-o://4311dc642eeb3b7afa452271bbe04109b434f64f9a16c57967c8f8c498472c42" gracePeriod=30
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.526468 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hdxzw_0bf8b868-6e71-4073-a9ad-e2ac8ae15215/kube-multus/2.log"
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.527806 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hdxzw_0bf8b868-6e71-4073-a9ad-e2ac8ae15215/kube-multus/1.log"
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.527892 4774 generic.go:334] "Generic (PLEG): container finished" podID="0bf8b868-6e71-4073-a9ad-e2ac8ae15215" containerID="1c2299fa29bd8573db83cc8a8f6870ad02177ed430bfa9b38d82eda3dc9213d0" exitCode=2
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.527953 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hdxzw" event={"ID":"0bf8b868-6e71-4073-a9ad-e2ac8ae15215","Type":"ContainerDied","Data":"1c2299fa29bd8573db83cc8a8f6870ad02177ed430bfa9b38d82eda3dc9213d0"}
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.528058 4774 scope.go:117] "RemoveContainer" containerID="2a46d1ab09af3a130d20e0b476d9b608e1d74cd85350b1e3b9b3e58d8c0a2ce8"
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.528640 4774 scope.go:117] "RemoveContainer" containerID="1c2299fa29bd8573db83cc8a8f6870ad02177ed430bfa9b38d82eda3dc9213d0"
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.535484 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovnkube-controller/3.log"
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.538891 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovn-acl-logging/0.log"
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.539724 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovn-controller/0.log"
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541101 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="4311dc642eeb3b7afa452271bbe04109b434f64f9a16c57967c8f8c498472c42" exitCode=0
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541141 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59" exitCode=0
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541150 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d" exitCode=0
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541159 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8" exitCode=0
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541181 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"4311dc642eeb3b7afa452271bbe04109b434f64f9a16c57967c8f8c498472c42"}
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541241 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59"}
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541266 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d"}
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541283 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8"}
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541174 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be" exitCode=0
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541299 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be"}
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541330 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af"}
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541303 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af" exitCode=0
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541372 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3" exitCode=143
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541396 4774 generic.go:334] "Generic (PLEG): container finished" podID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerID="ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd" exitCode=143
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541421 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3"}
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.541437 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd"}
Nov 21 14:16:40 crc kubenswrapper[4774]: I1121 14:16:40.566954 4774 scope.go:117] "RemoveContainer" containerID="c153d6de871c17af48616dddd43599cfc53e65aba608b7acd89fc169abff25e1"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.006072 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovn-acl-logging/0.log"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.008024 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovn-controller/0.log"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.008626 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.068928 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-w45dl"]
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069170 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069183 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069193 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069201 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069212 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovn-acl-logging"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069220 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovn-acl-logging"
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069229 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovn-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069237 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovn-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069255 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="sbdb"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069263 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="sbdb"
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069275 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="kubecfg-setup"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069283 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="kubecfg-setup"
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069290 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="kube-rbac-proxy-ovn-metrics"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069298 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="kube-rbac-proxy-ovn-metrics"
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069314 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="kube-rbac-proxy-node"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069321 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="kube-rbac-proxy-node"
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069332 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="northd"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069339 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="northd"
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069350 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069357 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069367 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="nbdb"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069373 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="nbdb"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069478 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069494 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="sbdb"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069501 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovn-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069508 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="kube-rbac-proxy-ovn-metrics"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069517 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069525 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069533 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069543 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069555 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="kube-rbac-proxy-node"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069567 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="nbdb"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069580 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovn-acl-logging"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069590 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="northd"
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069706 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069717 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: E1121 14:16:41.069729 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.069737 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" containerName="ovnkube-controller"
"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-node-log" (OuterVolumeSpecName: "node-log") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.130808 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-env-overrides\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.130854 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-etc-openvswitch\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.130846 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.130877 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-cni-bin\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.130906 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-slash\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.130931 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-openvswitch\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.130969 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-ovn\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.130997 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-kubelet\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131019 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovnkube-config\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc 
kubenswrapper[4774]: I1121 14:16:41.131048 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-var-lib-cni-networks-ovn-kubernetes\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131018 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131105 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131071 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131154 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131076 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-systemd-units\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131096 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-slash" (OuterVolumeSpecName: "host-slash") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131215 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131210 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-run-netns\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131238 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131166 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131139 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131340 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovn-node-metrics-cert\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131371 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-log-socket\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131384 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131394 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-var-lib-openvswitch\") pod \"4057b5ee-926e-4931-b5a0-2c204d18ce72\" (UID: \"4057b5ee-926e-4931-b5a0-2c204d18ce72\") " Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131433 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-log-socket" (OuterVolumeSpecName: "log-socket") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131542 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131641 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131750 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.131771 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "host-run-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132116 4774 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132150 4774 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132164 4774 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-kubelet\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132180 4774 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132192 4774 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-systemd-units\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132203 4774 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-run-netns\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132212 4774 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132224 4774 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-log-socket\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132233 4774 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132244 4774 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-node-log\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132257 4774 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132271 4774 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-cni-netd\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132281 4774 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4057b5ee-926e-4931-b5a0-2c204d18ce72-env-overrides\") on 
node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132291 4774 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132300 4774 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-cni-bin\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132313 4774 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-host-slash\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.132323 4774 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.138708 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.139619 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4057b5ee-926e-4931-b5a0-2c204d18ce72-kube-api-access-fkrjk" (OuterVolumeSpecName: "kube-api-access-fkrjk") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "kube-api-access-fkrjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.147042 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "4057b5ee-926e-4931-b5a0-2c204d18ce72" (UID: "4057b5ee-926e-4931-b5a0-2c204d18ce72"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.233592 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-slash\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.234568 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-log-socket\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.234607 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-node-log\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.234630 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-cni-bin\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.234657 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8105b720-8b0d-454f-83b0-16c80ed55315-ovnkube-script-lib\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.234683 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-systemd-units\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.234797 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.234982 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-run-openvswitch\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.235092 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/8105b720-8b0d-454f-83b0-16c80ed55315-env-overrides\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.235200 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-run-systemd\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.235395 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8105b720-8b0d-454f-83b0-16c80ed55315-ovnkube-config\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.235543 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rz99\" (UniqueName: \"kubernetes.io/projected/8105b720-8b0d-454f-83b0-16c80ed55315-kube-api-access-2rz99\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.235597 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8105b720-8b0d-454f-83b0-16c80ed55315-ovn-node-metrics-cert\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.235737 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-run-ovn-kubernetes\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.235804 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-var-lib-openvswitch\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.235945 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-run-netns\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.236054 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-kubelet\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.236123 4774 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-etc-openvswitch\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.236243 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-cni-netd\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.236283 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-run-ovn\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.236411 4774 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4057b5ee-926e-4931-b5a0-2c204d18ce72-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.236443 4774 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4057b5ee-926e-4931-b5a0-2c204d18ce72-run-systemd\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.236465 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkrjk\" (UniqueName: \"kubernetes.io/projected/4057b5ee-926e-4931-b5a0-2c204d18ce72-kube-api-access-fkrjk\") on node \"crc\" DevicePath \"\"" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337378 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-node-log\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337466 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-log-socket\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337493 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8105b720-8b0d-454f-83b0-16c80ed55315-ovnkube-script-lib\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337516 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-cni-bin\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337539 4774 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-systemd-units\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337561 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337580 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-run-openvswitch\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337567 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-log-socket\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337597 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8105b720-8b0d-454f-83b0-16c80ed55315-env-overrides\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337714 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337750 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-run-systemd\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337752 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-systemd-units\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337806 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-cni-bin\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337814 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8105b720-8b0d-454f-83b0-16c80ed55315-ovnkube-config\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337880 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-run-systemd\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337886 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-run-openvswitch\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337938 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-node-log\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337954 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rz99\" (UniqueName: \"kubernetes.io/projected/8105b720-8b0d-454f-83b0-16c80ed55315-kube-api-access-2rz99\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.337992 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8105b720-8b0d-454f-83b0-16c80ed55315-ovn-node-metrics-cert\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338066 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-run-ovn-kubernetes\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338096 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-var-lib-openvswitch\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338141 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-run-netns\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338205 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-kubelet\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338225 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-etc-openvswitch\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338258 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-run-ovn\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338282 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-cni-netd\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338343 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-slash\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338416 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8105b720-8b0d-454f-83b0-16c80ed55315-env-overrides\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338438 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-slash\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338477 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-etc-openvswitch\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338482 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-kubelet\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338505 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-run-ovn\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338530 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-run-ovn-kubernetes\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338535 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-cni-netd\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338572 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-var-lib-openvswitch\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338583 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8105b720-8b0d-454f-83b0-16c80ed55315-host-run-netns\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.338743 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8105b720-8b0d-454f-83b0-16c80ed55315-ovnkube-config\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.339114 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8105b720-8b0d-454f-83b0-16c80ed55315-ovnkube-script-lib\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.343760 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8105b720-8b0d-454f-83b0-16c80ed55315-ovn-node-metrics-cert\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.360946 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rz99\" (UniqueName: \"kubernetes.io/projected/8105b720-8b0d-454f-83b0-16c80ed55315-kube-api-access-2rz99\") pod \"ovnkube-node-w45dl\" (UID: \"8105b720-8b0d-454f-83b0-16c80ed55315\") " pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.384729 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:41 crc kubenswrapper[4774]: W1121 14:16:41.405203 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8105b720_8b0d_454f_83b0_16c80ed55315.slice/crio-cdb2aeb1729ce9c5fa205a183a57a9cac101c6b0a4d88b97156a232610bcc78d WatchSource:0}: Error finding container cdb2aeb1729ce9c5fa205a183a57a9cac101c6b0a4d88b97156a232610bcc78d: Status 404 returned error can't find the container with id cdb2aeb1729ce9c5fa205a183a57a9cac101c6b0a4d88b97156a232610bcc78d Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.554305 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" event={"ID":"8105b720-8b0d-454f-83b0-16c80ed55315","Type":"ContainerStarted","Data":"5a00154d080694457ad26a6831418d0a90b789dcb6e73e04408fbab89b8f2aaa"} Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.554379 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" event={"ID":"8105b720-8b0d-454f-83b0-16c80ed55315","Type":"ContainerStarted","Data":"cdb2aeb1729ce9c5fa205a183a57a9cac101c6b0a4d88b97156a232610bcc78d"} Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.571030 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovn-acl-logging/0.log" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.572147 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-rltf4_4057b5ee-926e-4931-b5a0-2c204d18ce72/ovn-controller/0.log" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.580984 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" event={"ID":"4057b5ee-926e-4931-b5a0-2c204d18ce72","Type":"ContainerDied","Data":"0e668ad61bd274297738a655d5cce5b29ad65721908cfc53df45db709452ddf1"} Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.581065 4774 scope.go:117] "RemoveContainer" containerID="4311dc642eeb3b7afa452271bbe04109b434f64f9a16c57967c8f8c498472c42" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.581270 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-rltf4" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.591996 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hdxzw_0bf8b868-6e71-4073-a9ad-e2ac8ae15215/kube-multus/2.log" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.592157 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hdxzw" event={"ID":"0bf8b868-6e71-4073-a9ad-e2ac8ae15215","Type":"ContainerStarted","Data":"90f836f94d15aedee6e2372ec5407af53516820ac037d89579ebfca01705cf87"} Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.625004 4774 scope.go:117] "RemoveContainer" containerID="37f742b0c539bef6d1e7f78190525c43797d7ab12c7849d4c09e5c059c5c5a59" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.634858 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rltf4"] Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.642974 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-rltf4"] Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.653797 4774 scope.go:117] "RemoveContainer" containerID="c9c28b67bfb48090ae96112508ed3657e68f45e6d78090f8add808789d61b48d" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.681573 4774 scope.go:117] "RemoveContainer" containerID="a31fb50b9c60606a4d449ad664a9b89df5d70d35c69d3fa5160d8f9fb1b378d8" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.700843 4774 scope.go:117] "RemoveContainer" containerID="11d753bcd29100df019682f0a6d1cc73a9a40e5fddeb6f98539dcb58c3d794be" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.721159 4774 scope.go:117] "RemoveContainer" containerID="64681416682abd797ad0f958409054b6e73e329547eb49ed8631461e1cb778af" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.740575 4774 scope.go:117] "RemoveContainer" containerID="0a3fd5cd35119f35f5f1c21a7d5765db1ea6f106562465ed5e6402fe123d78e3" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.767848 4774 scope.go:117] "RemoveContainer" containerID="ef2479b0d66b2663212697dbca2339c82c307878087f7e4e7bca2e197c3153fd" Nov 21 14:16:41 crc kubenswrapper[4774]: I1121 14:16:41.784905 4774 scope.go:117] "RemoveContainer" containerID="788e58797a863f9b15606329c51208ae45a0e5e9613aa93b190c5e93d8d460e5" Nov 21 14:16:42 crc kubenswrapper[4774]: I1121 14:16:42.122192 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4057b5ee-926e-4931-b5a0-2c204d18ce72" path="/var/lib/kubelet/pods/4057b5ee-926e-4931-b5a0-2c204d18ce72/volumes" Nov 21 14:16:42 crc kubenswrapper[4774]: I1121 14:16:42.603360 4774 generic.go:334] "Generic (PLEG): container finished" podID="8105b720-8b0d-454f-83b0-16c80ed55315" containerID="5a00154d080694457ad26a6831418d0a90b789dcb6e73e04408fbab89b8f2aaa" exitCode=0 Nov 21 14:16:42 crc kubenswrapper[4774]: I1121 14:16:42.603461 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" event={"ID":"8105b720-8b0d-454f-83b0-16c80ed55315","Type":"ContainerDied","Data":"5a00154d080694457ad26a6831418d0a90b789dcb6e73e04408fbab89b8f2aaa"} Nov 21 14:16:42 crc kubenswrapper[4774]: I1121 14:16:42.603896 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" event={"ID":"8105b720-8b0d-454f-83b0-16c80ed55315","Type":"ContainerStarted","Data":"35ba64586878b1cf8f78de5d1f405162791825a6407cf0f43cfe97efffb9a863"} Nov 21 14:16:42 crc kubenswrapper[4774]: I1121 
14:16:42.603916 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" event={"ID":"8105b720-8b0d-454f-83b0-16c80ed55315","Type":"ContainerStarted","Data":"1e292d900e68a840d39edc2ab52df78d3bd03acaa940465a9767221959f22fe4"} Nov 21 14:16:42 crc kubenswrapper[4774]: I1121 14:16:42.603930 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" event={"ID":"8105b720-8b0d-454f-83b0-16c80ed55315","Type":"ContainerStarted","Data":"c43898ad58b68cc8eda23261da5c3dddf581a919432b390bf381887a6679e78a"} Nov 21 14:16:42 crc kubenswrapper[4774]: I1121 14:16:42.603942 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" event={"ID":"8105b720-8b0d-454f-83b0-16c80ed55315","Type":"ContainerStarted","Data":"432b94cb58866ec0bc9e2acd0c56ecde3d736f2247c421d7b3b013181d81d5c0"} Nov 21 14:16:42 crc kubenswrapper[4774]: I1121 14:16:42.603965 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" event={"ID":"8105b720-8b0d-454f-83b0-16c80ed55315","Type":"ContainerStarted","Data":"2461c02739f01ac5e45b1dbaacaacc7b3e3217cfc70bb5a0ea500a3fae036727"} Nov 21 14:16:42 crc kubenswrapper[4774]: I1121 14:16:42.603978 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" event={"ID":"8105b720-8b0d-454f-83b0-16c80ed55315","Type":"ContainerStarted","Data":"890ceaad39ccb1bf19b471102ff2936d71b21e3fd4fdc4ce89be8360bcb7447a"} Nov 21 14:16:45 crc kubenswrapper[4774]: I1121 14:16:45.623557 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" event={"ID":"8105b720-8b0d-454f-83b0-16c80ed55315","Type":"ContainerStarted","Data":"a272ed7c501652f33828f9b08d22f607bd2a373be9bf20170de9caf730940b72"} Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.449653 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-lkp8t"] Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.451019 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.453942 4774 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-2vsdx" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.455377 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.455538 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.455561 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.535853 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvqk6\" (UniqueName: \"kubernetes.io/projected/6618c892-4191-4a4c-86bb-00750a4be8b7-kube-api-access-fvqk6\") pod \"crc-storage-crc-lkp8t\" (UID: \"6618c892-4191-4a4c-86bb-00750a4be8b7\") " pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.535937 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6618c892-4191-4a4c-86bb-00750a4be8b7-crc-storage\") pod \"crc-storage-crc-lkp8t\" (UID: \"6618c892-4191-4a4c-86bb-00750a4be8b7\") " pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.536020 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6618c892-4191-4a4c-86bb-00750a4be8b7-node-mnt\") pod \"crc-storage-crc-lkp8t\" (UID: \"6618c892-4191-4a4c-86bb-00750a4be8b7\") " pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.637730 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvqk6\" (UniqueName: \"kubernetes.io/projected/6618c892-4191-4a4c-86bb-00750a4be8b7-kube-api-access-fvqk6\") pod \"crc-storage-crc-lkp8t\" (UID: \"6618c892-4191-4a4c-86bb-00750a4be8b7\") " pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.637804 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6618c892-4191-4a4c-86bb-00750a4be8b7-crc-storage\") pod \"crc-storage-crc-lkp8t\" (UID: \"6618c892-4191-4a4c-86bb-00750a4be8b7\") " pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.637897 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6618c892-4191-4a4c-86bb-00750a4be8b7-node-mnt\") pod \"crc-storage-crc-lkp8t\" (UID: \"6618c892-4191-4a4c-86bb-00750a4be8b7\") " pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.638372 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6618c892-4191-4a4c-86bb-00750a4be8b7-node-mnt\") pod \"crc-storage-crc-lkp8t\" (UID: \"6618c892-4191-4a4c-86bb-00750a4be8b7\") " pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.638882 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6618c892-4191-4a4c-86bb-00750a4be8b7-crc-storage\") pod \"crc-storage-crc-lkp8t\" (UID: \"6618c892-4191-4a4c-86bb-00750a4be8b7\") " pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.641608 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" event={"ID":"8105b720-8b0d-454f-83b0-16c80ed55315","Type":"ContainerStarted","Data":"aae48454ff5fb45ac1102ad1676ead9bd5cfa016f411eb99c0438970121ec24d"} Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.642020 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.642091 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.642107 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.665542 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvqk6\" (UniqueName: \"kubernetes.io/projected/6618c892-4191-4a4c-86bb-00750a4be8b7-kube-api-access-fvqk6\") pod \"crc-storage-crc-lkp8t\" (UID: \"6618c892-4191-4a4c-86bb-00750a4be8b7\") " pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.674524 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" podStartSLOduration=6.674501627 podStartE2EDuration="6.674501627s" podCreationTimestamp="2025-11-21 14:16:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:16:47.66874918 +0000 UTC m=+798.320948459" watchObservedRunningTime="2025-11-21 14:16:47.674501627 +0000 UTC m=+798.326700886" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.681274 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.689770 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:16:47 crc kubenswrapper[4774]: I1121 14:16:47.769964 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: E1121 14:16:47.804006 4774 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-lkp8t_crc-storage_6618c892-4191-4a4c-86bb-00750a4be8b7_0(ecc35a817f8b5435b808f7b1c576831fd77762ee3956ec8dd348b2a7a840e5db): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 21 14:16:47 crc kubenswrapper[4774]: E1121 14:16:47.804118 4774 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-lkp8t_crc-storage_6618c892-4191-4a4c-86bb-00750a4be8b7_0(ecc35a817f8b5435b808f7b1c576831fd77762ee3956ec8dd348b2a7a840e5db): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: E1121 14:16:47.804148 4774 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-lkp8t_crc-storage_6618c892-4191-4a4c-86bb-00750a4be8b7_0(ecc35a817f8b5435b808f7b1c576831fd77762ee3956ec8dd348b2a7a840e5db): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:47 crc kubenswrapper[4774]: E1121 14:16:47.804210 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-lkp8t_crc-storage(6618c892-4191-4a4c-86bb-00750a4be8b7)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-lkp8t_crc-storage(6618c892-4191-4a4c-86bb-00750a4be8b7)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-lkp8t_crc-storage_6618c892-4191-4a4c-86bb-00750a4be8b7_0(ecc35a817f8b5435b808f7b1c576831fd77762ee3956ec8dd348b2a7a840e5db): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-lkp8t" podUID="6618c892-4191-4a4c-86bb-00750a4be8b7" Nov 21 14:16:49 crc kubenswrapper[4774]: I1121 14:16:49.865799 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-lkp8t"] Nov 21 14:16:49 crc kubenswrapper[4774]: I1121 14:16:49.866458 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:49 crc kubenswrapper[4774]: I1121 14:16:49.867103 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:49 crc kubenswrapper[4774]: E1121 14:16:49.899166 4774 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-lkp8t_crc-storage_6618c892-4191-4a4c-86bb-00750a4be8b7_0(c00a3b2cb59cd27b51fdd431b15060ad1df78217f7b8216caf753bb38db5d81c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 21 14:16:49 crc kubenswrapper[4774]: E1121 14:16:49.899474 4774 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-lkp8t_crc-storage_6618c892-4191-4a4c-86bb-00750a4be8b7_0(c00a3b2cb59cd27b51fdd431b15060ad1df78217f7b8216caf753bb38db5d81c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:49 crc kubenswrapper[4774]: E1121 14:16:49.899508 4774 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-lkp8t_crc-storage_6618c892-4191-4a4c-86bb-00750a4be8b7_0(c00a3b2cb59cd27b51fdd431b15060ad1df78217f7b8216caf753bb38db5d81c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:16:49 crc kubenswrapper[4774]: E1121 14:16:49.899605 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"crc-storage-crc-lkp8t_crc-storage(6618c892-4191-4a4c-86bb-00750a4be8b7)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"crc-storage-crc-lkp8t_crc-storage(6618c892-4191-4a4c-86bb-00750a4be8b7)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_crc-storage-crc-lkp8t_crc-storage_6618c892-4191-4a4c-86bb-00750a4be8b7_0(c00a3b2cb59cd27b51fdd431b15060ad1df78217f7b8216caf753bb38db5d81c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="crc-storage/crc-storage-crc-lkp8t" podUID="6618c892-4191-4a4c-86bb-00750a4be8b7" Nov 21 14:17:03 crc kubenswrapper[4774]: I1121 14:17:03.093288 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:17:03 crc kubenswrapper[4774]: I1121 14:17:03.095865 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:17:03 crc kubenswrapper[4774]: I1121 14:17:03.511221 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-lkp8t"] Nov 21 14:17:03 crc kubenswrapper[4774]: I1121 14:17:03.523874 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 14:17:03 crc kubenswrapper[4774]: I1121 14:17:03.730748 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-lkp8t" event={"ID":"6618c892-4191-4a4c-86bb-00750a4be8b7","Type":"ContainerStarted","Data":"36a85d02ea936af3d7d9684eed63815056938a5a8b7644d00b8765b46c608e3a"} Nov 21 14:17:05 crc kubenswrapper[4774]: I1121 14:17:05.743147 4774 generic.go:334] "Generic (PLEG): container finished" podID="6618c892-4191-4a4c-86bb-00750a4be8b7" containerID="d3191b2621bdbd561d3965cf16b6dc23c4cf65be6a7481c15b42d2cdb8991b50" exitCode=0 Nov 21 14:17:05 crc kubenswrapper[4774]: I1121 14:17:05.743228 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-lkp8t" event={"ID":"6618c892-4191-4a4c-86bb-00750a4be8b7","Type":"ContainerDied","Data":"d3191b2621bdbd561d3965cf16b6dc23c4cf65be6a7481c15b42d2cdb8991b50"} Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.000763 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.152282 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6618c892-4191-4a4c-86bb-00750a4be8b7-crc-storage\") pod \"6618c892-4191-4a4c-86bb-00750a4be8b7\" (UID: \"6618c892-4191-4a4c-86bb-00750a4be8b7\") " Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.152404 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6618c892-4191-4a4c-86bb-00750a4be8b7-node-mnt\") pod \"6618c892-4191-4a4c-86bb-00750a4be8b7\" (UID: \"6618c892-4191-4a4c-86bb-00750a4be8b7\") " Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.152464 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvqk6\" (UniqueName: \"kubernetes.io/projected/6618c892-4191-4a4c-86bb-00750a4be8b7-kube-api-access-fvqk6\") pod \"6618c892-4191-4a4c-86bb-00750a4be8b7\" (UID: \"6618c892-4191-4a4c-86bb-00750a4be8b7\") " Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.152690 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6618c892-4191-4a4c-86bb-00750a4be8b7-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "6618c892-4191-4a4c-86bb-00750a4be8b7" (UID: "6618c892-4191-4a4c-86bb-00750a4be8b7"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.154498 4774 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6618c892-4191-4a4c-86bb-00750a4be8b7-node-mnt\") on node \"crc\" DevicePath \"\"" Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.159747 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6618c892-4191-4a4c-86bb-00750a4be8b7-kube-api-access-fvqk6" (OuterVolumeSpecName: "kube-api-access-fvqk6") pod "6618c892-4191-4a4c-86bb-00750a4be8b7" (UID: "6618c892-4191-4a4c-86bb-00750a4be8b7"). InnerVolumeSpecName "kube-api-access-fvqk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.168662 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6618c892-4191-4a4c-86bb-00750a4be8b7-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "6618c892-4191-4a4c-86bb-00750a4be8b7" (UID: "6618c892-4191-4a4c-86bb-00750a4be8b7"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.256034 4774 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6618c892-4191-4a4c-86bb-00750a4be8b7-crc-storage\") on node \"crc\" DevicePath \"\"" Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.256085 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvqk6\" (UniqueName: \"kubernetes.io/projected/6618c892-4191-4a4c-86bb-00750a4be8b7-kube-api-access-fvqk6\") on node \"crc\" DevicePath \"\"" Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.757977 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-lkp8t" event={"ID":"6618c892-4191-4a4c-86bb-00750a4be8b7","Type":"ContainerDied","Data":"36a85d02ea936af3d7d9684eed63815056938a5a8b7644d00b8765b46c608e3a"} Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.758019 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36a85d02ea936af3d7d9684eed63815056938a5a8b7644d00b8765b46c608e3a" Nov 21 14:17:07 crc kubenswrapper[4774]: I1121 14:17:07.758038 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-lkp8t" Nov 21 14:17:11 crc kubenswrapper[4774]: I1121 14:17:11.410215 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w45dl" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.655231 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj"] Nov 21 14:17:14 crc kubenswrapper[4774]: E1121 14:17:14.656000 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6618c892-4191-4a4c-86bb-00750a4be8b7" containerName="storage" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.656019 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6618c892-4191-4a4c-86bb-00750a4be8b7" containerName="storage" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.656122 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6618c892-4191-4a4c-86bb-00750a4be8b7" containerName="storage" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.656980 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.659066 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.665209 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj"] Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.765297 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj\" (UID: \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.765354 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj\" (UID: \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.765379 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lnsx\" (UniqueName: \"kubernetes.io/projected/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-kube-api-access-4lnsx\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj\" (UID: \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.866423 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj\" (UID: \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.866481 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj\" (UID: \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.866513 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lnsx\" (UniqueName: \"kubernetes.io/projected/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-kube-api-access-4lnsx\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj\" (UID: \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.866984 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-util\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj\" (UID: \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.867588 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-bundle\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj\" (UID: \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.898070 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lnsx\" (UniqueName: \"kubernetes.io/projected/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-kube-api-access-4lnsx\") pod \"5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj\" (UID: \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\") " pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:14 crc kubenswrapper[4774]: I1121 14:17:14.978607 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:15 crc kubenswrapper[4774]: I1121 14:17:15.252086 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj"] Nov 21 14:17:15 crc kubenswrapper[4774]: I1121 14:17:15.813263 4774 generic.go:334] "Generic (PLEG): container finished" podID="aeb69fbb-0e79-498d-87c8-6b2bcef5607f" containerID="2652fad5cb9454d8d59415351c5589a62b782fa2c674e40cdaeeb8d6be1007e6" exitCode=0 Nov 21 14:17:15 crc kubenswrapper[4774]: I1121 14:17:15.813326 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" event={"ID":"aeb69fbb-0e79-498d-87c8-6b2bcef5607f","Type":"ContainerDied","Data":"2652fad5cb9454d8d59415351c5589a62b782fa2c674e40cdaeeb8d6be1007e6"} Nov 21 14:17:15 crc kubenswrapper[4774]: I1121 14:17:15.813361 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" event={"ID":"aeb69fbb-0e79-498d-87c8-6b2bcef5607f","Type":"ContainerStarted","Data":"b47aa8437fd9fca6308bec26445e667d6a05fa74e31ce3312d6e34dc90048197"} Nov 21 14:17:16 crc kubenswrapper[4774]: I1121 14:17:16.893681 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-w8mpv"] Nov 21 14:17:16 crc kubenswrapper[4774]: I1121 14:17:16.896626 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:16 crc kubenswrapper[4774]: I1121 14:17:16.903794 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w8mpv"] Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.001599 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f0c3157-9bc6-47a6-a645-15604038f88f-catalog-content\") pod \"redhat-operators-w8mpv\" (UID: \"4f0c3157-9bc6-47a6-a645-15604038f88f\") " pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.001740 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f0c3157-9bc6-47a6-a645-15604038f88f-utilities\") pod \"redhat-operators-w8mpv\" (UID: \"4f0c3157-9bc6-47a6-a645-15604038f88f\") " pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.001811 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz6tw\" (UniqueName: \"kubernetes.io/projected/4f0c3157-9bc6-47a6-a645-15604038f88f-kube-api-access-vz6tw\") pod \"redhat-operators-w8mpv\" (UID: \"4f0c3157-9bc6-47a6-a645-15604038f88f\") " pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.103471 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f0c3157-9bc6-47a6-a645-15604038f88f-catalog-content\") pod \"redhat-operators-w8mpv\" (UID: \"4f0c3157-9bc6-47a6-a645-15604038f88f\") " pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.103607 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f0c3157-9bc6-47a6-a645-15604038f88f-utilities\") pod \"redhat-operators-w8mpv\" (UID: \"4f0c3157-9bc6-47a6-a645-15604038f88f\") " pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.103659 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz6tw\" (UniqueName: \"kubernetes.io/projected/4f0c3157-9bc6-47a6-a645-15604038f88f-kube-api-access-vz6tw\") pod \"redhat-operators-w8mpv\" (UID: \"4f0c3157-9bc6-47a6-a645-15604038f88f\") " pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.104113 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f0c3157-9bc6-47a6-a645-15604038f88f-utilities\") pod \"redhat-operators-w8mpv\" (UID: \"4f0c3157-9bc6-47a6-a645-15604038f88f\") " pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.104111 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f0c3157-9bc6-47a6-a645-15604038f88f-catalog-content\") pod \"redhat-operators-w8mpv\" (UID: \"4f0c3157-9bc6-47a6-a645-15604038f88f\") " pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.136278 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vz6tw\" (UniqueName: \"kubernetes.io/projected/4f0c3157-9bc6-47a6-a645-15604038f88f-kube-api-access-vz6tw\") pod \"redhat-operators-w8mpv\" (UID: \"4f0c3157-9bc6-47a6-a645-15604038f88f\") " pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.213620 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.682410 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-w8mpv"] Nov 21 14:17:17 crc kubenswrapper[4774]: W1121 14:17:17.696036 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f0c3157_9bc6_47a6_a645_15604038f88f.slice/crio-9472efda0427bd6e3686f2e498bea7a072e7286aee7032b07a806bba52d95518 WatchSource:0}: Error finding container 9472efda0427bd6e3686f2e498bea7a072e7286aee7032b07a806bba52d95518: Status 404 returned error can't find the container with id 9472efda0427bd6e3686f2e498bea7a072e7286aee7032b07a806bba52d95518 Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.825487 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8mpv" event={"ID":"4f0c3157-9bc6-47a6-a645-15604038f88f","Type":"ContainerStarted","Data":"9472efda0427bd6e3686f2e498bea7a072e7286aee7032b07a806bba52d95518"} Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.828174 4774 generic.go:334] "Generic (PLEG): container finished" podID="aeb69fbb-0e79-498d-87c8-6b2bcef5607f" containerID="68ac795ad86b9b7e488d283e0e2ac6e9f8ae7610c834abd5e63c8951570bdabc" exitCode=0 Nov 21 14:17:17 crc kubenswrapper[4774]: I1121 14:17:17.828223 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" event={"ID":"aeb69fbb-0e79-498d-87c8-6b2bcef5607f","Type":"ContainerDied","Data":"68ac795ad86b9b7e488d283e0e2ac6e9f8ae7610c834abd5e63c8951570bdabc"} Nov 21 14:17:18 crc kubenswrapper[4774]: I1121 14:17:18.835884 4774 generic.go:334] "Generic (PLEG): container finished" podID="aeb69fbb-0e79-498d-87c8-6b2bcef5607f" containerID="d9b19b27d6c549c7c649a04875456464499800e86b0995dddae9d9a4728b4cff" exitCode=0 Nov 21 14:17:18 crc kubenswrapper[4774]: I1121 14:17:18.836014 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" event={"ID":"aeb69fbb-0e79-498d-87c8-6b2bcef5607f","Type":"ContainerDied","Data":"d9b19b27d6c549c7c649a04875456464499800e86b0995dddae9d9a4728b4cff"} Nov 21 14:17:18 crc kubenswrapper[4774]: I1121 14:17:18.839712 4774 generic.go:334] "Generic (PLEG): container finished" podID="4f0c3157-9bc6-47a6-a645-15604038f88f" containerID="2a7d46fbc1d640532ff1463b29904841bd5a3dd33559cdcb7f06f8d5e05d3829" exitCode=0 Nov 21 14:17:18 crc kubenswrapper[4774]: I1121 14:17:18.839770 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8mpv" event={"ID":"4f0c3157-9bc6-47a6-a645-15604038f88f","Type":"ContainerDied","Data":"2a7d46fbc1d640532ff1463b29904841bd5a3dd33559cdcb7f06f8d5e05d3829"} Nov 21 14:17:19 crc kubenswrapper[4774]: I1121 14:17:19.850545 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8mpv" 
event={"ID":"4f0c3157-9bc6-47a6-a645-15604038f88f","Type":"ContainerStarted","Data":"563c1fdb84ad734427c7f7d45fd8757debbf40ce4d8f257e3438b781445a989d"} Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.131617 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.250806 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-bundle\") pod \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\" (UID: \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\") " Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.251550 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4lnsx\" (UniqueName: \"kubernetes.io/projected/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-kube-api-access-4lnsx\") pod \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\" (UID: \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\") " Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.251606 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-util\") pod \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\" (UID: \"aeb69fbb-0e79-498d-87c8-6b2bcef5607f\") " Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.251806 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-bundle" (OuterVolumeSpecName: "bundle") pod "aeb69fbb-0e79-498d-87c8-6b2bcef5607f" (UID: "aeb69fbb-0e79-498d-87c8-6b2bcef5607f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.259163 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-kube-api-access-4lnsx" (OuterVolumeSpecName: "kube-api-access-4lnsx") pod "aeb69fbb-0e79-498d-87c8-6b2bcef5607f" (UID: "aeb69fbb-0e79-498d-87c8-6b2bcef5607f"). InnerVolumeSpecName "kube-api-access-4lnsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.271321 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-util" (OuterVolumeSpecName: "util") pod "aeb69fbb-0e79-498d-87c8-6b2bcef5607f" (UID: "aeb69fbb-0e79-498d-87c8-6b2bcef5607f"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.353388 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4lnsx\" (UniqueName: \"kubernetes.io/projected/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-kube-api-access-4lnsx\") on node \"crc\" DevicePath \"\"" Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.353433 4774 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-util\") on node \"crc\" DevicePath \"\"" Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.353444 4774 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/aeb69fbb-0e79-498d-87c8-6b2bcef5607f-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.859190 4774 generic.go:334] "Generic (PLEG): container finished" podID="4f0c3157-9bc6-47a6-a645-15604038f88f" containerID="563c1fdb84ad734427c7f7d45fd8757debbf40ce4d8f257e3438b781445a989d" exitCode=0 Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.859306 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8mpv" event={"ID":"4f0c3157-9bc6-47a6-a645-15604038f88f","Type":"ContainerDied","Data":"563c1fdb84ad734427c7f7d45fd8757debbf40ce4d8f257e3438b781445a989d"} Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.862523 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" event={"ID":"aeb69fbb-0e79-498d-87c8-6b2bcef5607f","Type":"ContainerDied","Data":"b47aa8437fd9fca6308bec26445e667d6a05fa74e31ce3312d6e34dc90048197"} Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.862564 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b47aa8437fd9fca6308bec26445e667d6a05fa74e31ce3312d6e34dc90048197" Nov 21 14:17:20 crc kubenswrapper[4774]: I1121 14:17:20.862596 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj" Nov 21 14:17:22 crc kubenswrapper[4774]: I1121 14:17:22.879574 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8mpv" event={"ID":"4f0c3157-9bc6-47a6-a645-15604038f88f","Type":"ContainerStarted","Data":"ce14e8e5d598d2d1f93a848c35472de6d2c2f689d57f6daa507d2aabb3d68e17"} Nov 21 14:17:22 crc kubenswrapper[4774]: I1121 14:17:22.898158 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-w8mpv" podStartSLOduration=3.91212186 podStartE2EDuration="6.898128459s" podCreationTimestamp="2025-11-21 14:17:16 +0000 UTC" firstStartedPulling="2025-11-21 14:17:18.841522839 +0000 UTC m=+829.493722098" lastFinishedPulling="2025-11-21 14:17:21.827529438 +0000 UTC m=+832.479728697" observedRunningTime="2025-11-21 14:17:22.896270865 +0000 UTC m=+833.548470134" watchObservedRunningTime="2025-11-21 14:17:22.898128459 +0000 UTC m=+833.550327718" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.037358 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-889z8"] Nov 21 14:17:25 crc kubenswrapper[4774]: E1121 14:17:25.037691 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aeb69fbb-0e79-498d-87c8-6b2bcef5607f" containerName="pull" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.037742 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="aeb69fbb-0e79-498d-87c8-6b2bcef5607f" containerName="pull" Nov 21 14:17:25 crc kubenswrapper[4774]: E1121 14:17:25.037752 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aeb69fbb-0e79-498d-87c8-6b2bcef5607f" containerName="extract" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.037758 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="aeb69fbb-0e79-498d-87c8-6b2bcef5607f" containerName="extract" Nov 21 14:17:25 crc kubenswrapper[4774]: E1121 14:17:25.037766 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aeb69fbb-0e79-498d-87c8-6b2bcef5607f" containerName="util" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.037774 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="aeb69fbb-0e79-498d-87c8-6b2bcef5607f" containerName="util" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.037894 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="aeb69fbb-0e79-498d-87c8-6b2bcef5607f" containerName="extract" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.038357 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-557fdffb88-889z8" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.042518 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-vzcp2" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.042641 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.044362 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.052416 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-889z8"] Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.124376 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4f65m\" (UniqueName: \"kubernetes.io/projected/7f7c6f72-ef8d-4c52-8ed9-2d37c82733be-kube-api-access-4f65m\") pod \"nmstate-operator-557fdffb88-889z8\" (UID: \"7f7c6f72-ef8d-4c52-8ed9-2d37c82733be\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-889z8" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.226181 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4f65m\" (UniqueName: \"kubernetes.io/projected/7f7c6f72-ef8d-4c52-8ed9-2d37c82733be-kube-api-access-4f65m\") pod \"nmstate-operator-557fdffb88-889z8\" (UID: \"7f7c6f72-ef8d-4c52-8ed9-2d37c82733be\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-889z8" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.246418 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4f65m\" (UniqueName: \"kubernetes.io/projected/7f7c6f72-ef8d-4c52-8ed9-2d37c82733be-kube-api-access-4f65m\") pod \"nmstate-operator-557fdffb88-889z8\" (UID: \"7f7c6f72-ef8d-4c52-8ed9-2d37c82733be\") " pod="openshift-nmstate/nmstate-operator-557fdffb88-889z8" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.363498 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-557fdffb88-889z8" Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.631801 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-557fdffb88-889z8"] Nov 21 14:17:25 crc kubenswrapper[4774]: I1121 14:17:25.903748 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-557fdffb88-889z8" event={"ID":"7f7c6f72-ef8d-4c52-8ed9-2d37c82733be","Type":"ContainerStarted","Data":"d1fd96743b13edacd27d880ec441ef6702bf5ada26b83be8ffb090bebd4a996f"} Nov 21 14:17:27 crc kubenswrapper[4774]: I1121 14:17:27.214073 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:27 crc kubenswrapper[4774]: I1121 14:17:27.214175 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:28 crc kubenswrapper[4774]: I1121 14:17:28.269479 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-w8mpv" podUID="4f0c3157-9bc6-47a6-a645-15604038f88f" containerName="registry-server" probeResult="failure" output=< Nov 21 14:17:28 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 14:17:28 crc kubenswrapper[4774]: > Nov 21 14:17:28 crc kubenswrapper[4774]: I1121 14:17:28.929028 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-557fdffb88-889z8" event={"ID":"7f7c6f72-ef8d-4c52-8ed9-2d37c82733be","Type":"ContainerStarted","Data":"1729c3ef815aef3892de34af1f507d84bb814413590881b21f1bd37be0771f56"} Nov 21 14:17:28 crc kubenswrapper[4774]: I1121 14:17:28.951036 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-557fdffb88-889z8" podStartSLOduration=1.411451953 podStartE2EDuration="3.951012929s" podCreationTimestamp="2025-11-21 14:17:25 +0000 UTC" firstStartedPulling="2025-11-21 14:17:25.658728489 +0000 UTC m=+836.310927748" lastFinishedPulling="2025-11-21 14:17:28.198289465 +0000 UTC m=+838.850488724" observedRunningTime="2025-11-21 14:17:28.948257649 +0000 UTC m=+839.600456908" watchObservedRunningTime="2025-11-21 14:17:28.951012929 +0000 UTC m=+839.603212188" Nov 21 14:17:34 crc kubenswrapper[4774]: I1121 14:17:34.934252 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-85d2j"] Nov 21 14:17:34 crc kubenswrapper[4774]: I1121 14:17:34.936840 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-85d2j" Nov 21 14:17:34 crc kubenswrapper[4774]: I1121 14:17:34.947628 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-85d2j"] Nov 21 14:17:34 crc kubenswrapper[4774]: I1121 14:17:34.961029 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg"] Nov 21 14:17:34 crc kubenswrapper[4774]: I1121 14:17:34.962096 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" Nov 21 14:17:34 crc kubenswrapper[4774]: I1121 14:17:34.964628 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-hqmrf" Nov 21 14:17:34 crc kubenswrapper[4774]: I1121 14:17:34.965467 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.004133 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg"] Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.011594 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-wwdp7"] Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.012643 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.102262 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/3eb78967-8171-4bec-8eac-616d427e4a8a-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-cpslg\" (UID: \"3eb78967-8171-4bec-8eac-616d427e4a8a\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.102337 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgslj\" (UniqueName: \"kubernetes.io/projected/d1402901-5738-45cc-8122-8a6f0b711e7c-kube-api-access-cgslj\") pod \"nmstate-handler-wwdp7\" (UID: \"d1402901-5738-45cc-8122-8a6f0b711e7c\") " pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.102438 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/d1402901-5738-45cc-8122-8a6f0b711e7c-nmstate-lock\") pod \"nmstate-handler-wwdp7\" (UID: \"d1402901-5738-45cc-8122-8a6f0b711e7c\") " pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.102512 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/d1402901-5738-45cc-8122-8a6f0b711e7c-dbus-socket\") pod \"nmstate-handler-wwdp7\" (UID: \"d1402901-5738-45cc-8122-8a6f0b711e7c\") " pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.102537 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zttf9\" (UniqueName: \"kubernetes.io/projected/6a94325f-9054-4c04-a0fa-64490ec11e50-kube-api-access-zttf9\") pod \"nmstate-metrics-5dcf9c57c5-85d2j\" (UID: \"6a94325f-9054-4c04-a0fa-64490ec11e50\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-85d2j" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.102576 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfgdq\" (UniqueName: \"kubernetes.io/projected/3eb78967-8171-4bec-8eac-616d427e4a8a-kube-api-access-nfgdq\") pod \"nmstate-webhook-6b89b748d8-cpslg\" (UID: \"3eb78967-8171-4bec-8eac-616d427e4a8a\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.102608 4774 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/d1402901-5738-45cc-8122-8a6f0b711e7c-ovs-socket\") pod \"nmstate-handler-wwdp7\" (UID: \"d1402901-5738-45cc-8122-8a6f0b711e7c\") " pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.164392 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6"] Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.165556 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.169162 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-s7w8d" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.169192 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.172319 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.203836 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/3eb78967-8171-4bec-8eac-616d427e4a8a-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-cpslg\" (UID: \"3eb78967-8171-4bec-8eac-616d427e4a8a\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.203911 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgslj\" (UniqueName: \"kubernetes.io/projected/d1402901-5738-45cc-8122-8a6f0b711e7c-kube-api-access-cgslj\") pod \"nmstate-handler-wwdp7\" (UID: \"d1402901-5738-45cc-8122-8a6f0b711e7c\") " pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.203952 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/d1402901-5738-45cc-8122-8a6f0b711e7c-nmstate-lock\") pod \"nmstate-handler-wwdp7\" (UID: \"d1402901-5738-45cc-8122-8a6f0b711e7c\") " pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.204016 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/d1402901-5738-45cc-8122-8a6f0b711e7c-dbus-socket\") pod \"nmstate-handler-wwdp7\" (UID: \"d1402901-5738-45cc-8122-8a6f0b711e7c\") " pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.204045 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zttf9\" (UniqueName: \"kubernetes.io/projected/6a94325f-9054-4c04-a0fa-64490ec11e50-kube-api-access-zttf9\") pod \"nmstate-metrics-5dcf9c57c5-85d2j\" (UID: \"6a94325f-9054-4c04-a0fa-64490ec11e50\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-85d2j" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.204098 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfgdq\" (UniqueName: \"kubernetes.io/projected/3eb78967-8171-4bec-8eac-616d427e4a8a-kube-api-access-nfgdq\") pod \"nmstate-webhook-6b89b748d8-cpslg\" (UID: \"3eb78967-8171-4bec-8eac-616d427e4a8a\") " 
pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.204143 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/d1402901-5738-45cc-8122-8a6f0b711e7c-ovs-socket\") pod \"nmstate-handler-wwdp7\" (UID: \"d1402901-5738-45cc-8122-8a6f0b711e7c\") " pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.204148 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/d1402901-5738-45cc-8122-8a6f0b711e7c-nmstate-lock\") pod \"nmstate-handler-wwdp7\" (UID: \"d1402901-5738-45cc-8122-8a6f0b711e7c\") " pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.204254 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/d1402901-5738-45cc-8122-8a6f0b711e7c-ovs-socket\") pod \"nmstate-handler-wwdp7\" (UID: \"d1402901-5738-45cc-8122-8a6f0b711e7c\") " pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.204373 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/d1402901-5738-45cc-8122-8a6f0b711e7c-dbus-socket\") pod \"nmstate-handler-wwdp7\" (UID: \"d1402901-5738-45cc-8122-8a6f0b711e7c\") " pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.225195 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/3eb78967-8171-4bec-8eac-616d427e4a8a-tls-key-pair\") pod \"nmstate-webhook-6b89b748d8-cpslg\" (UID: \"3eb78967-8171-4bec-8eac-616d427e4a8a\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.233173 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfgdq\" (UniqueName: \"kubernetes.io/projected/3eb78967-8171-4bec-8eac-616d427e4a8a-kube-api-access-nfgdq\") pod \"nmstate-webhook-6b89b748d8-cpslg\" (UID: \"3eb78967-8171-4bec-8eac-616d427e4a8a\") " pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.234660 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6"] Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.242810 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zttf9\" (UniqueName: \"kubernetes.io/projected/6a94325f-9054-4c04-a0fa-64490ec11e50-kube-api-access-zttf9\") pod \"nmstate-metrics-5dcf9c57c5-85d2j\" (UID: \"6a94325f-9054-4c04-a0fa-64490ec11e50\") " pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-85d2j" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.255741 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgslj\" (UniqueName: \"kubernetes.io/projected/d1402901-5738-45cc-8122-8a6f0b711e7c-kube-api-access-cgslj\") pod \"nmstate-handler-wwdp7\" (UID: \"d1402901-5738-45cc-8122-8a6f0b711e7c\") " pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.267208 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-85d2j" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.308548 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcdsh\" (UniqueName: \"kubernetes.io/projected/a49e02ad-f5a9-45a8-b1e5-b688d18383b5-kube-api-access-qcdsh\") pod \"nmstate-console-plugin-5874bd7bc5-vk5n6\" (UID: \"a49e02ad-f5a9-45a8-b1e5-b688d18383b5\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.309033 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a49e02ad-f5a9-45a8-b1e5-b688d18383b5-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-vk5n6\" (UID: \"a49e02ad-f5a9-45a8-b1e5-b688d18383b5\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.309069 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a49e02ad-f5a9-45a8-b1e5-b688d18383b5-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-vk5n6\" (UID: \"a49e02ad-f5a9-45a8-b1e5-b688d18383b5\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.309289 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.345145 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:35 crc kubenswrapper[4774]: W1121 14:17:35.389487 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1402901_5738_45cc_8122_8a6f0b711e7c.slice/crio-81a07fbc6d17a7b07ab4fad19fda8eacfb51c53b48bb580e3ebfdb42cdc9067f WatchSource:0}: Error finding container 81a07fbc6d17a7b07ab4fad19fda8eacfb51c53b48bb580e3ebfdb42cdc9067f: Status 404 returned error can't find the container with id 81a07fbc6d17a7b07ab4fad19fda8eacfb51c53b48bb580e3ebfdb42cdc9067f Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.409985 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcdsh\" (UniqueName: \"kubernetes.io/projected/a49e02ad-f5a9-45a8-b1e5-b688d18383b5-kube-api-access-qcdsh\") pod \"nmstate-console-plugin-5874bd7bc5-vk5n6\" (UID: \"a49e02ad-f5a9-45a8-b1e5-b688d18383b5\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.410055 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a49e02ad-f5a9-45a8-b1e5-b688d18383b5-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-vk5n6\" (UID: \"a49e02ad-f5a9-45a8-b1e5-b688d18383b5\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.410082 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a49e02ad-f5a9-45a8-b1e5-b688d18383b5-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-vk5n6\" (UID: \"a49e02ad-f5a9-45a8-b1e5-b688d18383b5\") " 
pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.411168 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/a49e02ad-f5a9-45a8-b1e5-b688d18383b5-nginx-conf\") pod \"nmstate-console-plugin-5874bd7bc5-vk5n6\" (UID: \"a49e02ad-f5a9-45a8-b1e5-b688d18383b5\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.417113 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/a49e02ad-f5a9-45a8-b1e5-b688d18383b5-plugin-serving-cert\") pod \"nmstate-console-plugin-5874bd7bc5-vk5n6\" (UID: \"a49e02ad-f5a9-45a8-b1e5-b688d18383b5\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.438789 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-9cffd6b86-qcnwd"] Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.440314 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.443549 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcdsh\" (UniqueName: \"kubernetes.io/projected/a49e02ad-f5a9-45a8-b1e5-b688d18383b5-kube-api-access-qcdsh\") pod \"nmstate-console-plugin-5874bd7bc5-vk5n6\" (UID: \"a49e02ad-f5a9-45a8-b1e5-b688d18383b5\") " pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.464410 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-9cffd6b86-qcnwd"] Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.479529 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.614236 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/634e1757-6f47-4bd3-94c1-127f8731f129-console-serving-cert\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.614814 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/634e1757-6f47-4bd3-94c1-127f8731f129-trusted-ca-bundle\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.614866 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/634e1757-6f47-4bd3-94c1-127f8731f129-console-config\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.614917 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/634e1757-6f47-4bd3-94c1-127f8731f129-service-ca\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.615141 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2csj\" (UniqueName: \"kubernetes.io/projected/634e1757-6f47-4bd3-94c1-127f8731f129-kube-api-access-m2csj\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.615288 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/634e1757-6f47-4bd3-94c1-127f8731f129-oauth-serving-cert\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.615453 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/634e1757-6f47-4bd3-94c1-127f8731f129-console-oauth-config\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.717408 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/634e1757-6f47-4bd3-94c1-127f8731f129-console-serving-cert\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.717472 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/634e1757-6f47-4bd3-94c1-127f8731f129-trusted-ca-bundle\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.717499 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/634e1757-6f47-4bd3-94c1-127f8731f129-console-config\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.717549 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/634e1757-6f47-4bd3-94c1-127f8731f129-service-ca\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.717580 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2csj\" (UniqueName: \"kubernetes.io/projected/634e1757-6f47-4bd3-94c1-127f8731f129-kube-api-access-m2csj\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.717616 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/634e1757-6f47-4bd3-94c1-127f8731f129-oauth-serving-cert\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.717683 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/634e1757-6f47-4bd3-94c1-127f8731f129-console-oauth-config\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.720233 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/634e1757-6f47-4bd3-94c1-127f8731f129-service-ca\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.720444 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/634e1757-6f47-4bd3-94c1-127f8731f129-console-config\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.721128 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/634e1757-6f47-4bd3-94c1-127f8731f129-oauth-serving-cert\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.728039 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/634e1757-6f47-4bd3-94c1-127f8731f129-console-oauth-config\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.729127 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/634e1757-6f47-4bd3-94c1-127f8731f129-trusted-ca-bundle\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.731803 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/634e1757-6f47-4bd3-94c1-127f8731f129-console-serving-cert\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.743176 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2csj\" (UniqueName: \"kubernetes.io/projected/634e1757-6f47-4bd3-94c1-127f8731f129-kube-api-access-m2csj\") pod \"console-9cffd6b86-qcnwd\" (UID: \"634e1757-6f47-4bd3-94c1-127f8731f129\") " pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.772865 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.791722 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6"] Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.839939 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-5dcf9c57c5-85d2j"] Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.882159 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg"] Nov 21 14:17:35 crc kubenswrapper[4774]: W1121 14:17:35.913811 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3eb78967_8171_4bec_8eac_616d427e4a8a.slice/crio-1d1abc9c337487abcf52ffbe551ce7fff3247ce4d6aac37a7fadcf637b32aca2 WatchSource:0}: Error finding container 1d1abc9c337487abcf52ffbe551ce7fff3247ce4d6aac37a7fadcf637b32aca2: Status 404 returned error can't find the container with id 1d1abc9c337487abcf52ffbe551ce7fff3247ce4d6aac37a7fadcf637b32aca2 Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.973588 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-wwdp7" event={"ID":"d1402901-5738-45cc-8122-8a6f0b711e7c","Type":"ContainerStarted","Data":"81a07fbc6d17a7b07ab4fad19fda8eacfb51c53b48bb580e3ebfdb42cdc9067f"} Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.974801 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" event={"ID":"a49e02ad-f5a9-45a8-b1e5-b688d18383b5","Type":"ContainerStarted","Data":"2ca5e7be563319c534e9f43fc46aa80fe7be487962e1e1bf0f319b523fa2500d"} Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.975603 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" 
event={"ID":"3eb78967-8171-4bec-8eac-616d427e4a8a","Type":"ContainerStarted","Data":"1d1abc9c337487abcf52ffbe551ce7fff3247ce4d6aac37a7fadcf637b32aca2"} Nov 21 14:17:35 crc kubenswrapper[4774]: I1121 14:17:35.976377 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-85d2j" event={"ID":"6a94325f-9054-4c04-a0fa-64490ec11e50","Type":"ContainerStarted","Data":"df43a6a37baa7f3e178db83500d9c5a3c17e3c0fede6127e2cf91d33485df877"} Nov 21 14:17:36 crc kubenswrapper[4774]: I1121 14:17:36.211627 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-9cffd6b86-qcnwd"] Nov 21 14:17:36 crc kubenswrapper[4774]: W1121 14:17:36.218757 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod634e1757_6f47_4bd3_94c1_127f8731f129.slice/crio-18da001d195444fc88292fa4d6f1cedff7e869487c5b9d580b797c3e6a84d902 WatchSource:0}: Error finding container 18da001d195444fc88292fa4d6f1cedff7e869487c5b9d580b797c3e6a84d902: Status 404 returned error can't find the container with id 18da001d195444fc88292fa4d6f1cedff7e869487c5b9d580b797c3e6a84d902 Nov 21 14:17:36 crc kubenswrapper[4774]: I1121 14:17:36.986734 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-9cffd6b86-qcnwd" event={"ID":"634e1757-6f47-4bd3-94c1-127f8731f129","Type":"ContainerStarted","Data":"e793ee00143217ee7c504f0dee2c17097146071d571ca4daa9326be4d1d124a4"} Nov 21 14:17:36 crc kubenswrapper[4774]: I1121 14:17:36.987295 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-9cffd6b86-qcnwd" event={"ID":"634e1757-6f47-4bd3-94c1-127f8731f129","Type":"ContainerStarted","Data":"18da001d195444fc88292fa4d6f1cedff7e869487c5b9d580b797c3e6a84d902"} Nov 21 14:17:37 crc kubenswrapper[4774]: I1121 14:17:37.011340 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-9cffd6b86-qcnwd" podStartSLOduration=2.011291355 podStartE2EDuration="2.011291355s" podCreationTimestamp="2025-11-21 14:17:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:17:37.008228647 +0000 UTC m=+847.660427906" watchObservedRunningTime="2025-11-21 14:17:37.011291355 +0000 UTC m=+847.663490614" Nov 21 14:17:37 crc kubenswrapper[4774]: I1121 14:17:37.272128 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:37 crc kubenswrapper[4774]: I1121 14:17:37.337198 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.002097 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-85d2j" event={"ID":"6a94325f-9054-4c04-a0fa-64490ec11e50","Type":"ContainerStarted","Data":"0e5898783509e3e6140736409b304fe88bc4048172b2043d3620918172041ac1"} Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.003872 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-wwdp7" event={"ID":"d1402901-5738-45cc-8122-8a6f0b711e7c","Type":"ContainerStarted","Data":"166798aaa26c05e4da5403b05a8cf22ce4cac04cdc6d44a91c9c9a418be807c5"} Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.004057 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.006132 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" event={"ID":"a49e02ad-f5a9-45a8-b1e5-b688d18383b5","Type":"ContainerStarted","Data":"0ffcdabd55d42dc78f8fbd733d184ebffe1efae09777d1d691d3c4e459beb4eb"} Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.007868 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" event={"ID":"3eb78967-8171-4bec-8eac-616d427e4a8a","Type":"ContainerStarted","Data":"f8a45c7efc17f3d1854b8a0edcd001da9f72cd306028b1c4dfb6959c6cebbf23"} Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.008053 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.021864 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-wwdp7" podStartSLOduration=1.787195469 podStartE2EDuration="5.021834724s" podCreationTimestamp="2025-11-21 14:17:34 +0000 UTC" firstStartedPulling="2025-11-21 14:17:35.393464052 +0000 UTC m=+846.045663311" lastFinishedPulling="2025-11-21 14:17:38.628103307 +0000 UTC m=+849.280302566" observedRunningTime="2025-11-21 14:17:39.021613167 +0000 UTC m=+849.673812426" watchObservedRunningTime="2025-11-21 14:17:39.021834724 +0000 UTC m=+849.674033983" Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.040496 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-5874bd7bc5-vk5n6" podStartSLOduration=1.218990514 podStartE2EDuration="4.040443306s" podCreationTimestamp="2025-11-21 14:17:35 +0000 UTC" firstStartedPulling="2025-11-21 14:17:35.805105901 +0000 UTC m=+846.457305170" lastFinishedPulling="2025-11-21 14:17:38.626558703 +0000 UTC m=+849.278757962" observedRunningTime="2025-11-21 14:17:39.036566085 +0000 UTC m=+849.688765344" watchObservedRunningTime="2025-11-21 14:17:39.040443306 +0000 UTC m=+849.692642565" Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.065360 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" podStartSLOduration=2.328667722 podStartE2EDuration="5.065328548s" podCreationTimestamp="2025-11-21 14:17:34 +0000 UTC" firstStartedPulling="2025-11-21 14:17:35.919446863 +0000 UTC m=+846.571646112" lastFinishedPulling="2025-11-21 14:17:38.656107679 +0000 UTC m=+849.308306938" observedRunningTime="2025-11-21 14:17:39.061625862 +0000 UTC m=+849.713825151" watchObservedRunningTime="2025-11-21 14:17:39.065328548 +0000 UTC m=+849.717527807" Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.888636 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vhtfp"] Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.890694 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.899520 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vhtfp"] Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.998672 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggj55\" (UniqueName: \"kubernetes.io/projected/c428054a-f5ab-4a08-ad95-9a560700fa1a-kube-api-access-ggj55\") pod \"community-operators-vhtfp\" (UID: \"c428054a-f5ab-4a08-ad95-9a560700fa1a\") " pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.998948 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c428054a-f5ab-4a08-ad95-9a560700fa1a-utilities\") pod \"community-operators-vhtfp\" (UID: \"c428054a-f5ab-4a08-ad95-9a560700fa1a\") " pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:39 crc kubenswrapper[4774]: I1121 14:17:39.999124 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c428054a-f5ab-4a08-ad95-9a560700fa1a-catalog-content\") pod \"community-operators-vhtfp\" (UID: \"c428054a-f5ab-4a08-ad95-9a560700fa1a\") " pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:40 crc kubenswrapper[4774]: I1121 14:17:40.100932 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c428054a-f5ab-4a08-ad95-9a560700fa1a-catalog-content\") pod \"community-operators-vhtfp\" (UID: \"c428054a-f5ab-4a08-ad95-9a560700fa1a\") " pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:40 crc kubenswrapper[4774]: I1121 14:17:40.101042 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggj55\" (UniqueName: \"kubernetes.io/projected/c428054a-f5ab-4a08-ad95-9a560700fa1a-kube-api-access-ggj55\") pod \"community-operators-vhtfp\" (UID: \"c428054a-f5ab-4a08-ad95-9a560700fa1a\") " pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:40 crc kubenswrapper[4774]: I1121 14:17:40.101108 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c428054a-f5ab-4a08-ad95-9a560700fa1a-utilities\") pod \"community-operators-vhtfp\" (UID: \"c428054a-f5ab-4a08-ad95-9a560700fa1a\") " pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:40 crc kubenswrapper[4774]: I1121 14:17:40.101600 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c428054a-f5ab-4a08-ad95-9a560700fa1a-catalog-content\") pod \"community-operators-vhtfp\" (UID: \"c428054a-f5ab-4a08-ad95-9a560700fa1a\") " pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:40 crc kubenswrapper[4774]: I1121 14:17:40.101776 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c428054a-f5ab-4a08-ad95-9a560700fa1a-utilities\") pod \"community-operators-vhtfp\" (UID: \"c428054a-f5ab-4a08-ad95-9a560700fa1a\") " pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:40 crc kubenswrapper[4774]: I1121 14:17:40.142028 4774 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ggj55\" (UniqueName: \"kubernetes.io/projected/c428054a-f5ab-4a08-ad95-9a560700fa1a-kube-api-access-ggj55\") pod \"community-operators-vhtfp\" (UID: \"c428054a-f5ab-4a08-ad95-9a560700fa1a\") " pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:40 crc kubenswrapper[4774]: I1121 14:17:40.213223 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:40 crc kubenswrapper[4774]: I1121 14:17:40.685190 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w8mpv"] Nov 21 14:17:40 crc kubenswrapper[4774]: I1121 14:17:40.686233 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-w8mpv" podUID="4f0c3157-9bc6-47a6-a645-15604038f88f" containerName="registry-server" containerID="cri-o://ce14e8e5d598d2d1f93a848c35472de6d2c2f689d57f6daa507d2aabb3d68e17" gracePeriod=2 Nov 21 14:17:40 crc kubenswrapper[4774]: I1121 14:17:40.764202 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vhtfp"] Nov 21 14:17:40 crc kubenswrapper[4774]: W1121 14:17:40.822583 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc428054a_f5ab_4a08_ad95_9a560700fa1a.slice/crio-02ef46072dd8f85842e8458a4e11d866b6dc91bc0077537320ec832c92946a0f WatchSource:0}: Error finding container 02ef46072dd8f85842e8458a4e11d866b6dc91bc0077537320ec832c92946a0f: Status 404 returned error can't find the container with id 02ef46072dd8f85842e8458a4e11d866b6dc91bc0077537320ec832c92946a0f Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.032085 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhtfp" event={"ID":"c428054a-f5ab-4a08-ad95-9a560700fa1a","Type":"ContainerStarted","Data":"97323bea75c40d4c1ca9636a206164399f3ec18e261001ce787ee8760489a0ea"} Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.032606 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhtfp" event={"ID":"c428054a-f5ab-4a08-ad95-9a560700fa1a","Type":"ContainerStarted","Data":"02ef46072dd8f85842e8458a4e11d866b6dc91bc0077537320ec832c92946a0f"} Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.039642 4774 generic.go:334] "Generic (PLEG): container finished" podID="4f0c3157-9bc6-47a6-a645-15604038f88f" containerID="ce14e8e5d598d2d1f93a848c35472de6d2c2f689d57f6daa507d2aabb3d68e17" exitCode=0 Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.039708 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8mpv" event={"ID":"4f0c3157-9bc6-47a6-a645-15604038f88f","Type":"ContainerDied","Data":"ce14e8e5d598d2d1f93a848c35472de6d2c2f689d57f6daa507d2aabb3d68e17"} Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.513147 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.633112 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f0c3157-9bc6-47a6-a645-15604038f88f-utilities\") pod \"4f0c3157-9bc6-47a6-a645-15604038f88f\" (UID: \"4f0c3157-9bc6-47a6-a645-15604038f88f\") " Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.633240 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f0c3157-9bc6-47a6-a645-15604038f88f-catalog-content\") pod \"4f0c3157-9bc6-47a6-a645-15604038f88f\" (UID: \"4f0c3157-9bc6-47a6-a645-15604038f88f\") " Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.634177 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f0c3157-9bc6-47a6-a645-15604038f88f-utilities" (OuterVolumeSpecName: "utilities") pod "4f0c3157-9bc6-47a6-a645-15604038f88f" (UID: "4f0c3157-9bc6-47a6-a645-15604038f88f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.635143 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vz6tw\" (UniqueName: \"kubernetes.io/projected/4f0c3157-9bc6-47a6-a645-15604038f88f-kube-api-access-vz6tw\") pod \"4f0c3157-9bc6-47a6-a645-15604038f88f\" (UID: \"4f0c3157-9bc6-47a6-a645-15604038f88f\") " Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.635882 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f0c3157-9bc6-47a6-a645-15604038f88f-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.643188 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f0c3157-9bc6-47a6-a645-15604038f88f-kube-api-access-vz6tw" (OuterVolumeSpecName: "kube-api-access-vz6tw") pod "4f0c3157-9bc6-47a6-a645-15604038f88f" (UID: "4f0c3157-9bc6-47a6-a645-15604038f88f"). InnerVolumeSpecName "kube-api-access-vz6tw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.724133 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f0c3157-9bc6-47a6-a645-15604038f88f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4f0c3157-9bc6-47a6-a645-15604038f88f" (UID: "4f0c3157-9bc6-47a6-a645-15604038f88f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.737490 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vz6tw\" (UniqueName: \"kubernetes.io/projected/4f0c3157-9bc6-47a6-a645-15604038f88f-kube-api-access-vz6tw\") on node \"crc\" DevicePath \"\"" Nov 21 14:17:41 crc kubenswrapper[4774]: I1121 14:17:41.737533 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f0c3157-9bc6-47a6-a645-15604038f88f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:17:42 crc kubenswrapper[4774]: I1121 14:17:42.051535 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-w8mpv" event={"ID":"4f0c3157-9bc6-47a6-a645-15604038f88f","Type":"ContainerDied","Data":"9472efda0427bd6e3686f2e498bea7a072e7286aee7032b07a806bba52d95518"} Nov 21 14:17:42 crc kubenswrapper[4774]: I1121 14:17:42.051622 4774 scope.go:117] "RemoveContainer" containerID="ce14e8e5d598d2d1f93a848c35472de6d2c2f689d57f6daa507d2aabb3d68e17" Nov 21 14:17:42 crc kubenswrapper[4774]: I1121 14:17:42.051771 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-w8mpv" Nov 21 14:17:42 crc kubenswrapper[4774]: I1121 14:17:42.059368 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-85d2j" event={"ID":"6a94325f-9054-4c04-a0fa-64490ec11e50","Type":"ContainerStarted","Data":"5bc3ac4154e5bc243a1dcab591a94aef11165994b7efee0145d1686c6192095c"} Nov 21 14:17:42 crc kubenswrapper[4774]: I1121 14:17:42.070417 4774 scope.go:117] "RemoveContainer" containerID="563c1fdb84ad734427c7f7d45fd8757debbf40ce4d8f257e3438b781445a989d" Nov 21 14:17:42 crc kubenswrapper[4774]: I1121 14:17:42.072347 4774 generic.go:334] "Generic (PLEG): container finished" podID="c428054a-f5ab-4a08-ad95-9a560700fa1a" containerID="97323bea75c40d4c1ca9636a206164399f3ec18e261001ce787ee8760489a0ea" exitCode=0 Nov 21 14:17:42 crc kubenswrapper[4774]: I1121 14:17:42.072410 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhtfp" event={"ID":"c428054a-f5ab-4a08-ad95-9a560700fa1a","Type":"ContainerDied","Data":"97323bea75c40d4c1ca9636a206164399f3ec18e261001ce787ee8760489a0ea"} Nov 21 14:17:42 crc kubenswrapper[4774]: I1121 14:17:42.090115 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-5dcf9c57c5-85d2j" podStartSLOduration=2.419357396 podStartE2EDuration="8.090085798s" podCreationTimestamp="2025-11-21 14:17:34 +0000 UTC" firstStartedPulling="2025-11-21 14:17:35.856302686 +0000 UTC m=+846.508501945" lastFinishedPulling="2025-11-21 14:17:41.527031088 +0000 UTC m=+852.179230347" observedRunningTime="2025-11-21 14:17:42.083474559 +0000 UTC m=+852.735673828" watchObservedRunningTime="2025-11-21 14:17:42.090085798 +0000 UTC m=+852.742285057" Nov 21 14:17:42 crc kubenswrapper[4774]: I1121 14:17:42.114013 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-w8mpv"] Nov 21 14:17:42 crc kubenswrapper[4774]: I1121 14:17:42.115840 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-w8mpv"] Nov 21 14:17:42 crc kubenswrapper[4774]: I1121 14:17:42.117350 4774 scope.go:117] "RemoveContainer" containerID="2a7d46fbc1d640532ff1463b29904841bd5a3dd33559cdcb7f06f8d5e05d3829" Nov 21 14:17:43 
crc kubenswrapper[4774]: I1121 14:17:43.080429 4774 generic.go:334] "Generic (PLEG): container finished" podID="c428054a-f5ab-4a08-ad95-9a560700fa1a" containerID="ee210bb7d83cd44aa30c95b3c16cb453249ad707b49dc3cfbc8893494001cb8d" exitCode=0 Nov 21 14:17:43 crc kubenswrapper[4774]: I1121 14:17:43.080550 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhtfp" event={"ID":"c428054a-f5ab-4a08-ad95-9a560700fa1a","Type":"ContainerDied","Data":"ee210bb7d83cd44aa30c95b3c16cb453249ad707b49dc3cfbc8893494001cb8d"} Nov 21 14:17:44 crc kubenswrapper[4774]: I1121 14:17:44.107309 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f0c3157-9bc6-47a6-a645-15604038f88f" path="/var/lib/kubelet/pods/4f0c3157-9bc6-47a6-a645-15604038f88f/volumes" Nov 21 14:17:44 crc kubenswrapper[4774]: I1121 14:17:44.107998 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhtfp" event={"ID":"c428054a-f5ab-4a08-ad95-9a560700fa1a","Type":"ContainerStarted","Data":"8fd9e105cc5213604b894474234d821cd591d3b432d815c133455599523dce00"} Nov 21 14:17:44 crc kubenswrapper[4774]: I1121 14:17:44.114878 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vhtfp" podStartSLOduration=3.038338938 podStartE2EDuration="5.114799293s" podCreationTimestamp="2025-11-21 14:17:39 +0000 UTC" firstStartedPulling="2025-11-21 14:17:41.45056502 +0000 UTC m=+852.102764279" lastFinishedPulling="2025-11-21 14:17:43.527025375 +0000 UTC m=+854.179224634" observedRunningTime="2025-11-21 14:17:44.112740615 +0000 UTC m=+854.764939874" watchObservedRunningTime="2025-11-21 14:17:44.114799293 +0000 UTC m=+854.766998552" Nov 21 14:17:45 crc kubenswrapper[4774]: I1121 14:17:45.372792 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-wwdp7" Nov 21 14:17:45 crc kubenswrapper[4774]: I1121 14:17:45.774621 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:45 crc kubenswrapper[4774]: I1121 14:17:45.774668 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:45 crc kubenswrapper[4774]: I1121 14:17:45.781346 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:46 crc kubenswrapper[4774]: I1121 14:17:46.115289 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-9cffd6b86-qcnwd" Nov 21 14:17:46 crc kubenswrapper[4774]: I1121 14:17:46.168711 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-w7tjv"] Nov 21 14:17:50 crc kubenswrapper[4774]: I1121 14:17:50.213765 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:50 crc kubenswrapper[4774]: I1121 14:17:50.215038 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:50 crc kubenswrapper[4774]: I1121 14:17:50.284312 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:51 crc kubenswrapper[4774]: I1121 14:17:51.184549 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:52 crc kubenswrapper[4774]: I1121 14:17:52.679156 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vhtfp"] Nov 21 14:17:53 crc kubenswrapper[4774]: I1121 14:17:53.152497 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vhtfp" podUID="c428054a-f5ab-4a08-ad95-9a560700fa1a" containerName="registry-server" containerID="cri-o://8fd9e105cc5213604b894474234d821cd591d3b432d815c133455599523dce00" gracePeriod=2 Nov 21 14:17:54 crc kubenswrapper[4774]: I1121 14:17:54.165694 4774 generic.go:334] "Generic (PLEG): container finished" podID="c428054a-f5ab-4a08-ad95-9a560700fa1a" containerID="8fd9e105cc5213604b894474234d821cd591d3b432d815c133455599523dce00" exitCode=0 Nov 21 14:17:54 crc kubenswrapper[4774]: I1121 14:17:54.165741 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhtfp" event={"ID":"c428054a-f5ab-4a08-ad95-9a560700fa1a","Type":"ContainerDied","Data":"8fd9e105cc5213604b894474234d821cd591d3b432d815c133455599523dce00"} Nov 21 14:17:54 crc kubenswrapper[4774]: I1121 14:17:54.238777 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:54 crc kubenswrapper[4774]: I1121 14:17:54.435360 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c428054a-f5ab-4a08-ad95-9a560700fa1a-utilities\") pod \"c428054a-f5ab-4a08-ad95-9a560700fa1a\" (UID: \"c428054a-f5ab-4a08-ad95-9a560700fa1a\") " Nov 21 14:17:54 crc kubenswrapper[4774]: I1121 14:17:54.435483 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggj55\" (UniqueName: \"kubernetes.io/projected/c428054a-f5ab-4a08-ad95-9a560700fa1a-kube-api-access-ggj55\") pod \"c428054a-f5ab-4a08-ad95-9a560700fa1a\" (UID: \"c428054a-f5ab-4a08-ad95-9a560700fa1a\") " Nov 21 14:17:54 crc kubenswrapper[4774]: I1121 14:17:54.435611 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c428054a-f5ab-4a08-ad95-9a560700fa1a-catalog-content\") pod \"c428054a-f5ab-4a08-ad95-9a560700fa1a\" (UID: \"c428054a-f5ab-4a08-ad95-9a560700fa1a\") " Nov 21 14:17:54 crc kubenswrapper[4774]: I1121 14:17:54.436739 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c428054a-f5ab-4a08-ad95-9a560700fa1a-utilities" (OuterVolumeSpecName: "utilities") pod "c428054a-f5ab-4a08-ad95-9a560700fa1a" (UID: "c428054a-f5ab-4a08-ad95-9a560700fa1a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:17:54 crc kubenswrapper[4774]: I1121 14:17:54.443209 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c428054a-f5ab-4a08-ad95-9a560700fa1a-kube-api-access-ggj55" (OuterVolumeSpecName: "kube-api-access-ggj55") pod "c428054a-f5ab-4a08-ad95-9a560700fa1a" (UID: "c428054a-f5ab-4a08-ad95-9a560700fa1a"). InnerVolumeSpecName "kube-api-access-ggj55". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:17:54 crc kubenswrapper[4774]: I1121 14:17:54.488870 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c428054a-f5ab-4a08-ad95-9a560700fa1a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c428054a-f5ab-4a08-ad95-9a560700fa1a" (UID: "c428054a-f5ab-4a08-ad95-9a560700fa1a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:17:54 crc kubenswrapper[4774]: I1121 14:17:54.538126 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggj55\" (UniqueName: \"kubernetes.io/projected/c428054a-f5ab-4a08-ad95-9a560700fa1a-kube-api-access-ggj55\") on node \"crc\" DevicePath \"\"" Nov 21 14:17:54 crc kubenswrapper[4774]: I1121 14:17:54.538187 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c428054a-f5ab-4a08-ad95-9a560700fa1a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:17:54 crc kubenswrapper[4774]: I1121 14:17:54.538202 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c428054a-f5ab-4a08-ad95-9a560700fa1a-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:17:55 crc kubenswrapper[4774]: I1121 14:17:55.178044 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vhtfp" event={"ID":"c428054a-f5ab-4a08-ad95-9a560700fa1a","Type":"ContainerDied","Data":"02ef46072dd8f85842e8458a4e11d866b6dc91bc0077537320ec832c92946a0f"} Nov 21 14:17:55 crc kubenswrapper[4774]: I1121 14:17:55.178144 4774 scope.go:117] "RemoveContainer" containerID="8fd9e105cc5213604b894474234d821cd591d3b432d815c133455599523dce00" Nov 21 14:17:55 crc kubenswrapper[4774]: I1121 14:17:55.178133 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vhtfp" Nov 21 14:17:55 crc kubenswrapper[4774]: I1121 14:17:55.204806 4774 scope.go:117] "RemoveContainer" containerID="ee210bb7d83cd44aa30c95b3c16cb453249ad707b49dc3cfbc8893494001cb8d" Nov 21 14:17:55 crc kubenswrapper[4774]: I1121 14:17:55.220063 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vhtfp"] Nov 21 14:17:55 crc kubenswrapper[4774]: I1121 14:17:55.223872 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vhtfp"] Nov 21 14:17:55 crc kubenswrapper[4774]: I1121 14:17:55.244895 4774 scope.go:117] "RemoveContainer" containerID="97323bea75c40d4c1ca9636a206164399f3ec18e261001ce787ee8760489a0ea" Nov 21 14:17:55 crc kubenswrapper[4774]: I1121 14:17:55.317303 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6b89b748d8-cpslg" Nov 21 14:17:56 crc kubenswrapper[4774]: I1121 14:17:56.099949 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c428054a-f5ab-4a08-ad95-9a560700fa1a" path="/var/lib/kubelet/pods/c428054a-f5ab-4a08-ad95-9a560700fa1a/volumes" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.340207 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr"] Nov 21 14:18:09 crc kubenswrapper[4774]: E1121 14:18:09.341719 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f0c3157-9bc6-47a6-a645-15604038f88f" containerName="registry-server" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.341739 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f0c3157-9bc6-47a6-a645-15604038f88f" containerName="registry-server" Nov 21 14:18:09 crc kubenswrapper[4774]: E1121 14:18:09.341772 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f0c3157-9bc6-47a6-a645-15604038f88f" containerName="extract-utilities" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.341784 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f0c3157-9bc6-47a6-a645-15604038f88f" containerName="extract-utilities" Nov 21 14:18:09 crc kubenswrapper[4774]: E1121 14:18:09.341810 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c428054a-f5ab-4a08-ad95-9a560700fa1a" containerName="extract-content" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.341831 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c428054a-f5ab-4a08-ad95-9a560700fa1a" containerName="extract-content" Nov 21 14:18:09 crc kubenswrapper[4774]: E1121 14:18:09.341839 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f0c3157-9bc6-47a6-a645-15604038f88f" containerName="extract-content" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.341845 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f0c3157-9bc6-47a6-a645-15604038f88f" containerName="extract-content" Nov 21 14:18:09 crc kubenswrapper[4774]: E1121 14:18:09.341862 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c428054a-f5ab-4a08-ad95-9a560700fa1a" containerName="registry-server" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.341867 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c428054a-f5ab-4a08-ad95-9a560700fa1a" containerName="registry-server" Nov 21 14:18:09 crc kubenswrapper[4774]: E1121 14:18:09.341876 4774 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c428054a-f5ab-4a08-ad95-9a560700fa1a" containerName="extract-utilities" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.341886 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c428054a-f5ab-4a08-ad95-9a560700fa1a" containerName="extract-utilities" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.342119 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="c428054a-f5ab-4a08-ad95-9a560700fa1a" containerName="registry-server" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.342146 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f0c3157-9bc6-47a6-a645-15604038f88f" containerName="registry-server" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.344489 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.353747 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.363708 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr"] Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.397758 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trbhz\" (UniqueName: \"kubernetes.io/projected/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-kube-api-access-trbhz\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr\" (UID: \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.398051 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr\" (UID: \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.398112 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr\" (UID: \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.499428 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trbhz\" (UniqueName: \"kubernetes.io/projected/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-kube-api-access-trbhz\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr\" (UID: \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.499516 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr\" (UID: 
\"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.499544 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr\" (UID: \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.500128 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr\" (UID: \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.500227 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr\" (UID: \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.524612 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trbhz\" (UniqueName: \"kubernetes.io/projected/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-kube-api-access-trbhz\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr\" (UID: \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.713655 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:09 crc kubenswrapper[4774]: I1121 14:18:09.924559 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr"] Nov 21 14:18:10 crc kubenswrapper[4774]: I1121 14:18:10.292205 4774 generic.go:334] "Generic (PLEG): container finished" podID="e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" containerID="7bc1330ef26d5ae861e3ea06502a7e878db3c55860e182b286cc40662ffa3db4" exitCode=0 Nov 21 14:18:10 crc kubenswrapper[4774]: I1121 14:18:10.292264 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" event={"ID":"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd","Type":"ContainerDied","Data":"7bc1330ef26d5ae861e3ea06502a7e878db3c55860e182b286cc40662ffa3db4"} Nov 21 14:18:10 crc kubenswrapper[4774]: I1121 14:18:10.292298 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" event={"ID":"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd","Type":"ContainerStarted","Data":"0bae65a9edd30ee0daecc3aedf3610adc26481b422bdd4132e0a6086b3da5a12"} Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.214687 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-w7tjv" podUID="b94e7447-7c8a-4f4e-9507-689f1500605c" containerName="console" containerID="cri-o://ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6" gracePeriod=15 Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.661079 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-w7tjv_b94e7447-7c8a-4f4e-9507-689f1500605c/console/0.log" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.661607 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.834702 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2hp\" (UniqueName: \"kubernetes.io/projected/b94e7447-7c8a-4f4e-9507-689f1500605c-kube-api-access-bf2hp\") pod \"b94e7447-7c8a-4f4e-9507-689f1500605c\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.834807 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-oauth-serving-cert\") pod \"b94e7447-7c8a-4f4e-9507-689f1500605c\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.834973 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-service-ca\") pod \"b94e7447-7c8a-4f4e-9507-689f1500605c\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.835008 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b94e7447-7c8a-4f4e-9507-689f1500605c-console-serving-cert\") pod \"b94e7447-7c8a-4f4e-9507-689f1500605c\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.835074 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-trusted-ca-bundle\") pod \"b94e7447-7c8a-4f4e-9507-689f1500605c\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.835106 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-console-config\") pod \"b94e7447-7c8a-4f4e-9507-689f1500605c\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.835194 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b94e7447-7c8a-4f4e-9507-689f1500605c-console-oauth-config\") pod \"b94e7447-7c8a-4f4e-9507-689f1500605c\" (UID: \"b94e7447-7c8a-4f4e-9507-689f1500605c\") " Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.835776 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-service-ca" (OuterVolumeSpecName: "service-ca") pod "b94e7447-7c8a-4f4e-9507-689f1500605c" (UID: "b94e7447-7c8a-4f4e-9507-689f1500605c"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.835775 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "b94e7447-7c8a-4f4e-9507-689f1500605c" (UID: "b94e7447-7c8a-4f4e-9507-689f1500605c"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.836476 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "b94e7447-7c8a-4f4e-9507-689f1500605c" (UID: "b94e7447-7c8a-4f4e-9507-689f1500605c"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.836501 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-console-config" (OuterVolumeSpecName: "console-config") pod "b94e7447-7c8a-4f4e-9507-689f1500605c" (UID: "b94e7447-7c8a-4f4e-9507-689f1500605c"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.841958 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b94e7447-7c8a-4f4e-9507-689f1500605c-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "b94e7447-7c8a-4f4e-9507-689f1500605c" (UID: "b94e7447-7c8a-4f4e-9507-689f1500605c"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.843455 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b94e7447-7c8a-4f4e-9507-689f1500605c-kube-api-access-bf2hp" (OuterVolumeSpecName: "kube-api-access-bf2hp") pod "b94e7447-7c8a-4f4e-9507-689f1500605c" (UID: "b94e7447-7c8a-4f4e-9507-689f1500605c"). InnerVolumeSpecName "kube-api-access-bf2hp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.844062 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b94e7447-7c8a-4f4e-9507-689f1500605c-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "b94e7447-7c8a-4f4e-9507-689f1500605c" (UID: "b94e7447-7c8a-4f4e-9507-689f1500605c"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.936694 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2hp\" (UniqueName: \"kubernetes.io/projected/b94e7447-7c8a-4f4e-9507-689f1500605c-kube-api-access-bf2hp\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.936748 4774 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.936761 4774 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-service-ca\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.936777 4774 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b94e7447-7c8a-4f4e-9507-689f1500605c-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.936788 4774 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.936799 4774 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b94e7447-7c8a-4f4e-9507-689f1500605c-console-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:11 crc kubenswrapper[4774]: I1121 14:18:11.936810 4774 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b94e7447-7c8a-4f4e-9507-689f1500605c-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:12 crc kubenswrapper[4774]: I1121 14:18:12.307405 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-w7tjv_b94e7447-7c8a-4f4e-9507-689f1500605c/console/0.log" Nov 21 14:18:12 crc kubenswrapper[4774]: I1121 14:18:12.307475 4774 generic.go:334] "Generic (PLEG): container finished" podID="b94e7447-7c8a-4f4e-9507-689f1500605c" containerID="ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6" exitCode=2 Nov 21 14:18:12 crc kubenswrapper[4774]: I1121 14:18:12.307523 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-w7tjv" event={"ID":"b94e7447-7c8a-4f4e-9507-689f1500605c","Type":"ContainerDied","Data":"ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6"} Nov 21 14:18:12 crc kubenswrapper[4774]: I1121 14:18:12.307560 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-w7tjv" event={"ID":"b94e7447-7c8a-4f4e-9507-689f1500605c","Type":"ContainerDied","Data":"f947e1763846ee89c51c09cc4d4b74cf15386b9610b51c8f020f6309350d8064"} Nov 21 14:18:12 crc kubenswrapper[4774]: I1121 14:18:12.307582 4774 scope.go:117] "RemoveContainer" containerID="ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6" Nov 21 14:18:12 crc kubenswrapper[4774]: I1121 14:18:12.307679 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-w7tjv" Nov 21 14:18:12 crc kubenswrapper[4774]: I1121 14:18:12.337105 4774 scope.go:117] "RemoveContainer" containerID="ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6" Nov 21 14:18:12 crc kubenswrapper[4774]: E1121 14:18:12.350247 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6\": container with ID starting with ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6 not found: ID does not exist" containerID="ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6" Nov 21 14:18:12 crc kubenswrapper[4774]: I1121 14:18:12.350377 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6"} err="failed to get container status \"ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6\": rpc error: code = NotFound desc = could not find container \"ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6\": container with ID starting with ef28e7e63cd3e14526b5f8b349506b070e2ce1e7fd014ea2cfc5b98af57ca7b6 not found: ID does not exist" Nov 21 14:18:12 crc kubenswrapper[4774]: I1121 14:18:12.350514 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-w7tjv"] Nov 21 14:18:12 crc kubenswrapper[4774]: I1121 14:18:12.357882 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-w7tjv"] Nov 21 14:18:13 crc kubenswrapper[4774]: I1121 14:18:13.318369 4774 generic.go:334] "Generic (PLEG): container finished" podID="e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" containerID="f7498f9c3bec13854768d94ba194fa13c55a0310377f83476d883159c250f3e0" exitCode=0 Nov 21 14:18:13 crc kubenswrapper[4774]: I1121 14:18:13.318412 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" event={"ID":"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd","Type":"ContainerDied","Data":"f7498f9c3bec13854768d94ba194fa13c55a0310377f83476d883159c250f3e0"} Nov 21 14:18:14 crc kubenswrapper[4774]: I1121 14:18:14.103092 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b94e7447-7c8a-4f4e-9507-689f1500605c" path="/var/lib/kubelet/pods/b94e7447-7c8a-4f4e-9507-689f1500605c/volumes" Nov 21 14:18:14 crc kubenswrapper[4774]: I1121 14:18:14.332261 4774 generic.go:334] "Generic (PLEG): container finished" podID="e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" containerID="4ac607629c2e62ef9fcc169defaa385016e169f608395fa006ac8a8edad78958" exitCode=0 Nov 21 14:18:14 crc kubenswrapper[4774]: I1121 14:18:14.332369 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" event={"ID":"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd","Type":"ContainerDied","Data":"4ac607629c2e62ef9fcc169defaa385016e169f608395fa006ac8a8edad78958"} Nov 21 14:18:15 crc kubenswrapper[4774]: I1121 14:18:15.586908 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:15 crc kubenswrapper[4774]: I1121 14:18:15.695211 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-bundle\") pod \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\" (UID: \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\") " Nov 21 14:18:15 crc kubenswrapper[4774]: I1121 14:18:15.695428 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trbhz\" (UniqueName: \"kubernetes.io/projected/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-kube-api-access-trbhz\") pod \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\" (UID: \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\") " Nov 21 14:18:15 crc kubenswrapper[4774]: I1121 14:18:15.695464 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-util\") pod \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\" (UID: \"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd\") " Nov 21 14:18:15 crc kubenswrapper[4774]: I1121 14:18:15.696613 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-bundle" (OuterVolumeSpecName: "bundle") pod "e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" (UID: "e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:18:15 crc kubenswrapper[4774]: I1121 14:18:15.703174 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-kube-api-access-trbhz" (OuterVolumeSpecName: "kube-api-access-trbhz") pod "e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" (UID: "e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd"). InnerVolumeSpecName "kube-api-access-trbhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:18:15 crc kubenswrapper[4774]: I1121 14:18:15.797563 4774 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:15 crc kubenswrapper[4774]: I1121 14:18:15.797636 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trbhz\" (UniqueName: \"kubernetes.io/projected/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-kube-api-access-trbhz\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:16 crc kubenswrapper[4774]: I1121 14:18:16.075166 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-util" (OuterVolumeSpecName: "util") pod "e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" (UID: "e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:18:16 crc kubenswrapper[4774]: I1121 14:18:16.104340 4774 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd-util\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:16 crc kubenswrapper[4774]: I1121 14:18:16.347303 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" event={"ID":"e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd","Type":"ContainerDied","Data":"0bae65a9edd30ee0daecc3aedf3610adc26481b422bdd4132e0a6086b3da5a12"} Nov 21 14:18:16 crc kubenswrapper[4774]: I1121 14:18:16.347367 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0bae65a9edd30ee0daecc3aedf3610adc26481b422bdd4132e0a6086b3da5a12" Nov 21 14:18:16 crc kubenswrapper[4774]: I1121 14:18:16.347368 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.490986 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nqnkm"] Nov 21 14:18:22 crc kubenswrapper[4774]: E1121 14:18:22.493809 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b94e7447-7c8a-4f4e-9507-689f1500605c" containerName="console" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.493928 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b94e7447-7c8a-4f4e-9507-689f1500605c" containerName="console" Nov 21 14:18:22 crc kubenswrapper[4774]: E1121 14:18:22.493994 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" containerName="util" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.494051 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" containerName="util" Nov 21 14:18:22 crc kubenswrapper[4774]: E1121 14:18:22.494141 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" containerName="pull" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.494221 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" containerName="pull" Nov 21 14:18:22 crc kubenswrapper[4774]: E1121 14:18:22.494288 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" containerName="extract" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.494346 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" containerName="extract" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.494568 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd" containerName="extract" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.494651 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b94e7447-7c8a-4f4e-9507-689f1500605c" containerName="console" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.495954 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.511880 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqnkm"] Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.593664 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-catalog-content\") pod \"redhat-marketplace-nqnkm\" (UID: \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\") " pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.593764 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kp75k\" (UniqueName: \"kubernetes.io/projected/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-kube-api-access-kp75k\") pod \"redhat-marketplace-nqnkm\" (UID: \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\") " pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.593833 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-utilities\") pod \"redhat-marketplace-nqnkm\" (UID: \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\") " pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.695490 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-catalog-content\") pod \"redhat-marketplace-nqnkm\" (UID: \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\") " pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.695562 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kp75k\" (UniqueName: \"kubernetes.io/projected/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-kube-api-access-kp75k\") pod \"redhat-marketplace-nqnkm\" (UID: \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\") " pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.695605 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-utilities\") pod \"redhat-marketplace-nqnkm\" (UID: \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\") " pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.696216 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-utilities\") pod \"redhat-marketplace-nqnkm\" (UID: \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\") " pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.696318 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-catalog-content\") pod \"redhat-marketplace-nqnkm\" (UID: \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\") " pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.726314 4774 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-kp75k\" (UniqueName: \"kubernetes.io/projected/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-kube-api-access-kp75k\") pod \"redhat-marketplace-nqnkm\" (UID: \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\") " pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:22 crc kubenswrapper[4774]: I1121 14:18:22.822363 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.089850 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t5slm"] Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.095226 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.102638 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t5slm"] Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.203462 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ba443c5-8c51-419d-b520-7da8a1df2159-catalog-content\") pod \"certified-operators-t5slm\" (UID: \"9ba443c5-8c51-419d-b520-7da8a1df2159\") " pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.203556 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgqff\" (UniqueName: \"kubernetes.io/projected/9ba443c5-8c51-419d-b520-7da8a1df2159-kube-api-access-qgqff\") pod \"certified-operators-t5slm\" (UID: \"9ba443c5-8c51-419d-b520-7da8a1df2159\") " pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.203985 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ba443c5-8c51-419d-b520-7da8a1df2159-utilities\") pod \"certified-operators-t5slm\" (UID: \"9ba443c5-8c51-419d-b520-7da8a1df2159\") " pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.305462 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ba443c5-8c51-419d-b520-7da8a1df2159-utilities\") pod \"certified-operators-t5slm\" (UID: \"9ba443c5-8c51-419d-b520-7da8a1df2159\") " pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.305561 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ba443c5-8c51-419d-b520-7da8a1df2159-catalog-content\") pod \"certified-operators-t5slm\" (UID: \"9ba443c5-8c51-419d-b520-7da8a1df2159\") " pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.305585 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgqff\" (UniqueName: \"kubernetes.io/projected/9ba443c5-8c51-419d-b520-7da8a1df2159-kube-api-access-qgqff\") pod \"certified-operators-t5slm\" (UID: \"9ba443c5-8c51-419d-b520-7da8a1df2159\") " pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.306048 4774 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ba443c5-8c51-419d-b520-7da8a1df2159-catalog-content\") pod \"certified-operators-t5slm\" (UID: \"9ba443c5-8c51-419d-b520-7da8a1df2159\") " pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.306092 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ba443c5-8c51-419d-b520-7da8a1df2159-utilities\") pod \"certified-operators-t5slm\" (UID: \"9ba443c5-8c51-419d-b520-7da8a1df2159\") " pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.338496 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgqff\" (UniqueName: \"kubernetes.io/projected/9ba443c5-8c51-419d-b520-7da8a1df2159-kube-api-access-qgqff\") pod \"certified-operators-t5slm\" (UID: \"9ba443c5-8c51-419d-b520-7da8a1df2159\") " pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.380798 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqnkm"] Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.421056 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:23 crc kubenswrapper[4774]: I1121 14:18:23.689172 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t5slm"] Nov 21 14:18:24 crc kubenswrapper[4774]: I1121 14:18:24.396293 4774 generic.go:334] "Generic (PLEG): container finished" podID="9ba443c5-8c51-419d-b520-7da8a1df2159" containerID="ef3f75898fd9d58897366fab1427c352abcdf893119205d89b31833c69c2abc1" exitCode=0 Nov 21 14:18:24 crc kubenswrapper[4774]: I1121 14:18:24.396365 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5slm" event={"ID":"9ba443c5-8c51-419d-b520-7da8a1df2159","Type":"ContainerDied","Data":"ef3f75898fd9d58897366fab1427c352abcdf893119205d89b31833c69c2abc1"} Nov 21 14:18:24 crc kubenswrapper[4774]: I1121 14:18:24.396396 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5slm" event={"ID":"9ba443c5-8c51-419d-b520-7da8a1df2159","Type":"ContainerStarted","Data":"777398dc3bcf67f7c887bc421110538cf92c908dc9ff12171c4077ee38362720"} Nov 21 14:18:24 crc kubenswrapper[4774]: I1121 14:18:24.399135 4774 generic.go:334] "Generic (PLEG): container finished" podID="0df44fe7-135d-4fdb-8b90-33a7e30b16cc" containerID="713de82ef657033f5252b0c876e5bf008b9dc135fd2253f9754e5ffd2c466281" exitCode=0 Nov 21 14:18:24 crc kubenswrapper[4774]: I1121 14:18:24.399171 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqnkm" event={"ID":"0df44fe7-135d-4fdb-8b90-33a7e30b16cc","Type":"ContainerDied","Data":"713de82ef657033f5252b0c876e5bf008b9dc135fd2253f9754e5ffd2c466281"} Nov 21 14:18:24 crc kubenswrapper[4774]: I1121 14:18:24.399194 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqnkm" event={"ID":"0df44fe7-135d-4fdb-8b90-33a7e30b16cc","Type":"ContainerStarted","Data":"63aa9cf5e5f6036265424c5318dc473a9edc0ca47f0938ba37bbbeb619bb70a7"} Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.418426 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-nqnkm" event={"ID":"0df44fe7-135d-4fdb-8b90-33a7e30b16cc","Type":"ContainerStarted","Data":"0e5edd4ae252bc4f61d752de4605c2567eed792f91fb956cdf7378c074f97559"} Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.421075 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5slm" event={"ID":"9ba443c5-8c51-419d-b520-7da8a1df2159","Type":"ContainerStarted","Data":"66ade80c15636e4280bb2e75ecec92e50c5a309d863415f472b831f7976cf4c0"} Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.872034 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk"] Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.873026 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.892166 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.892166 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-5xc7s" Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.892170 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.892348 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.892173 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.916532 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk"] Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.960952 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/70f8c673-949c-45d9-881b-eaf99a5fc797-apiservice-cert\") pod \"metallb-operator-controller-manager-6b9b569fbd-zv6fk\" (UID: \"70f8c673-949c-45d9-881b-eaf99a5fc797\") " pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.961172 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/70f8c673-949c-45d9-881b-eaf99a5fc797-webhook-cert\") pod \"metallb-operator-controller-manager-6b9b569fbd-zv6fk\" (UID: \"70f8c673-949c-45d9-881b-eaf99a5fc797\") " pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:18:26 crc kubenswrapper[4774]: I1121 14:18:26.961307 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjnmz\" (UniqueName: \"kubernetes.io/projected/70f8c673-949c-45d9-881b-eaf99a5fc797-kube-api-access-xjnmz\") pod \"metallb-operator-controller-manager-6b9b569fbd-zv6fk\" (UID: \"70f8c673-949c-45d9-881b-eaf99a5fc797\") " pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.063304 4774 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/70f8c673-949c-45d9-881b-eaf99a5fc797-apiservice-cert\") pod \"metallb-operator-controller-manager-6b9b569fbd-zv6fk\" (UID: \"70f8c673-949c-45d9-881b-eaf99a5fc797\") " pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.063387 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/70f8c673-949c-45d9-881b-eaf99a5fc797-webhook-cert\") pod \"metallb-operator-controller-manager-6b9b569fbd-zv6fk\" (UID: \"70f8c673-949c-45d9-881b-eaf99a5fc797\") " pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.063413 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjnmz\" (UniqueName: \"kubernetes.io/projected/70f8c673-949c-45d9-881b-eaf99a5fc797-kube-api-access-xjnmz\") pod \"metallb-operator-controller-manager-6b9b569fbd-zv6fk\" (UID: \"70f8c673-949c-45d9-881b-eaf99a5fc797\") " pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.072581 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/70f8c673-949c-45d9-881b-eaf99a5fc797-apiservice-cert\") pod \"metallb-operator-controller-manager-6b9b569fbd-zv6fk\" (UID: \"70f8c673-949c-45d9-881b-eaf99a5fc797\") " pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.073421 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/70f8c673-949c-45d9-881b-eaf99a5fc797-webhook-cert\") pod \"metallb-operator-controller-manager-6b9b569fbd-zv6fk\" (UID: \"70f8c673-949c-45d9-881b-eaf99a5fc797\") " pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.108124 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjnmz\" (UniqueName: \"kubernetes.io/projected/70f8c673-949c-45d9-881b-eaf99a5fc797-kube-api-access-xjnmz\") pod \"metallb-operator-controller-manager-6b9b569fbd-zv6fk\" (UID: \"70f8c673-949c-45d9-881b-eaf99a5fc797\") " pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.110131 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-944c54d44-trkz5"] Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.119198 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.122222 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.122989 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.123351 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-9ktqw" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.130299 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-944c54d44-trkz5"] Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.164922 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5ede42a0-d5c9-4693-b58c-e11a09887d36-apiservice-cert\") pod \"metallb-operator-webhook-server-944c54d44-trkz5\" (UID: \"5ede42a0-d5c9-4693-b58c-e11a09887d36\") " pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.164990 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rm84\" (UniqueName: \"kubernetes.io/projected/5ede42a0-d5c9-4693-b58c-e11a09887d36-kube-api-access-2rm84\") pod \"metallb-operator-webhook-server-944c54d44-trkz5\" (UID: \"5ede42a0-d5c9-4693-b58c-e11a09887d36\") " pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.165029 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5ede42a0-d5c9-4693-b58c-e11a09887d36-webhook-cert\") pod \"metallb-operator-webhook-server-944c54d44-trkz5\" (UID: \"5ede42a0-d5c9-4693-b58c-e11a09887d36\") " pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.190689 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.267214 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5ede42a0-d5c9-4693-b58c-e11a09887d36-apiservice-cert\") pod \"metallb-operator-webhook-server-944c54d44-trkz5\" (UID: \"5ede42a0-d5c9-4693-b58c-e11a09887d36\") " pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.267284 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rm84\" (UniqueName: \"kubernetes.io/projected/5ede42a0-d5c9-4693-b58c-e11a09887d36-kube-api-access-2rm84\") pod \"metallb-operator-webhook-server-944c54d44-trkz5\" (UID: \"5ede42a0-d5c9-4693-b58c-e11a09887d36\") " pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.267335 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5ede42a0-d5c9-4693-b58c-e11a09887d36-webhook-cert\") pod \"metallb-operator-webhook-server-944c54d44-trkz5\" (UID: \"5ede42a0-d5c9-4693-b58c-e11a09887d36\") " pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.272458 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5ede42a0-d5c9-4693-b58c-e11a09887d36-webhook-cert\") pod \"metallb-operator-webhook-server-944c54d44-trkz5\" (UID: \"5ede42a0-d5c9-4693-b58c-e11a09887d36\") " pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.290672 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5ede42a0-d5c9-4693-b58c-e11a09887d36-apiservice-cert\") pod \"metallb-operator-webhook-server-944c54d44-trkz5\" (UID: \"5ede42a0-d5c9-4693-b58c-e11a09887d36\") " pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.295871 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rm84\" (UniqueName: \"kubernetes.io/projected/5ede42a0-d5c9-4693-b58c-e11a09887d36-kube-api-access-2rm84\") pod \"metallb-operator-webhook-server-944c54d44-trkz5\" (UID: \"5ede42a0-d5c9-4693-b58c-e11a09887d36\") " pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.434188 4774 generic.go:334] "Generic (PLEG): container finished" podID="0df44fe7-135d-4fdb-8b90-33a7e30b16cc" containerID="0e5edd4ae252bc4f61d752de4605c2567eed792f91fb956cdf7378c074f97559" exitCode=0 Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.434336 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqnkm" event={"ID":"0df44fe7-135d-4fdb-8b90-33a7e30b16cc","Type":"ContainerDied","Data":"0e5edd4ae252bc4f61d752de4605c2567eed792f91fb956cdf7378c074f97559"} Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.442045 4774 generic.go:334] "Generic (PLEG): container finished" podID="9ba443c5-8c51-419d-b520-7da8a1df2159" containerID="66ade80c15636e4280bb2e75ecec92e50c5a309d863415f472b831f7976cf4c0" exitCode=0 Nov 21 14:18:27 crc kubenswrapper[4774]: 
I1121 14:18:27.442438 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5slm" event={"ID":"9ba443c5-8c51-419d-b520-7da8a1df2159","Type":"ContainerDied","Data":"66ade80c15636e4280bb2e75ecec92e50c5a309d863415f472b831f7976cf4c0"} Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.449994 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.697331 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk"] Nov 21 14:18:27 crc kubenswrapper[4774]: I1121 14:18:27.746976 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-944c54d44-trkz5"] Nov 21 14:18:27 crc kubenswrapper[4774]: W1121 14:18:27.753941 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ede42a0_d5c9_4693_b58c_e11a09887d36.slice/crio-2ca1f673800803569e37ec544d82c399655618b953d213ad7e7a0e60caa024d4 WatchSource:0}: Error finding container 2ca1f673800803569e37ec544d82c399655618b953d213ad7e7a0e60caa024d4: Status 404 returned error can't find the container with id 2ca1f673800803569e37ec544d82c399655618b953d213ad7e7a0e60caa024d4 Nov 21 14:18:28 crc kubenswrapper[4774]: I1121 14:18:28.452283 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" event={"ID":"70f8c673-949c-45d9-881b-eaf99a5fc797","Type":"ContainerStarted","Data":"9d2be81405473fbf14327ceb7accd94bcabbeae8fbbe305cd16718b5af13a0b1"} Nov 21 14:18:28 crc kubenswrapper[4774]: I1121 14:18:28.455882 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5slm" event={"ID":"9ba443c5-8c51-419d-b520-7da8a1df2159","Type":"ContainerStarted","Data":"f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa"} Nov 21 14:18:28 crc kubenswrapper[4774]: I1121 14:18:28.458110 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" event={"ID":"5ede42a0-d5c9-4693-b58c-e11a09887d36","Type":"ContainerStarted","Data":"2ca1f673800803569e37ec544d82c399655618b953d213ad7e7a0e60caa024d4"} Nov 21 14:18:28 crc kubenswrapper[4774]: I1121 14:18:28.461688 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqnkm" event={"ID":"0df44fe7-135d-4fdb-8b90-33a7e30b16cc","Type":"ContainerStarted","Data":"7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2"} Nov 21 14:18:28 crc kubenswrapper[4774]: I1121 14:18:28.480050 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t5slm" podStartSLOduration=1.726997734 podStartE2EDuration="5.480021173s" podCreationTimestamp="2025-11-21 14:18:23 +0000 UTC" firstStartedPulling="2025-11-21 14:18:24.398097414 +0000 UTC m=+895.050296673" lastFinishedPulling="2025-11-21 14:18:28.151120853 +0000 UTC m=+898.803320112" observedRunningTime="2025-11-21 14:18:28.477321216 +0000 UTC m=+899.129520465" watchObservedRunningTime="2025-11-21 14:18:28.480021173 +0000 UTC m=+899.132220432" Nov 21 14:18:29 crc kubenswrapper[4774]: I1121 14:18:29.601995 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:18:29 crc kubenswrapper[4774]: I1121 14:18:29.602083 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:18:30 crc kubenswrapper[4774]: I1121 14:18:30.124020 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nqnkm" podStartSLOduration=4.456317308 podStartE2EDuration="8.123990784s" podCreationTimestamp="2025-11-21 14:18:22 +0000 UTC" firstStartedPulling="2025-11-21 14:18:24.400904175 +0000 UTC m=+895.053103434" lastFinishedPulling="2025-11-21 14:18:28.068577651 +0000 UTC m=+898.720776910" observedRunningTime="2025-11-21 14:18:28.504784922 +0000 UTC m=+899.156984181" watchObservedRunningTime="2025-11-21 14:18:30.123990784 +0000 UTC m=+900.776190043" Nov 21 14:18:32 crc kubenswrapper[4774]: I1121 14:18:32.493508 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" event={"ID":"70f8c673-949c-45d9-881b-eaf99a5fc797","Type":"ContainerStarted","Data":"dee2c5a38b31150738a5ec2408b542dc61c3ab43ba7aabcd9636e2fbe27ec5ca"} Nov 21 14:18:32 crc kubenswrapper[4774]: I1121 14:18:32.494052 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:18:32 crc kubenswrapper[4774]: I1121 14:18:32.527495 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" podStartSLOduration=2.817000005 podStartE2EDuration="6.527472576s" podCreationTimestamp="2025-11-21 14:18:26 +0000 UTC" firstStartedPulling="2025-11-21 14:18:27.718119223 +0000 UTC m=+898.370318482" lastFinishedPulling="2025-11-21 14:18:31.428591794 +0000 UTC m=+902.080791053" observedRunningTime="2025-11-21 14:18:32.52305715 +0000 UTC m=+903.175256419" watchObservedRunningTime="2025-11-21 14:18:32.527472576 +0000 UTC m=+903.179671835" Nov 21 14:18:32 crc kubenswrapper[4774]: I1121 14:18:32.823521 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:32 crc kubenswrapper[4774]: I1121 14:18:32.823767 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:32 crc kubenswrapper[4774]: I1121 14:18:32.894769 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:33 crc kubenswrapper[4774]: I1121 14:18:33.421640 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:33 crc kubenswrapper[4774]: I1121 14:18:33.421687 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:33 crc kubenswrapper[4774]: I1121 14:18:33.464212 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t5slm" Nov 
21 14:18:33 crc kubenswrapper[4774]: I1121 14:18:33.542314 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t5slm"
Nov 21 14:18:33 crc kubenswrapper[4774]: I1121 14:18:33.547565 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nqnkm"
Nov 21 14:18:34 crc kubenswrapper[4774]: I1121 14:18:34.506807 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" event={"ID":"5ede42a0-d5c9-4693-b58c-e11a09887d36","Type":"ContainerStarted","Data":"b6fa00dd6fe47795fef977c05440d07b811ce42a54257dea95b14f0e5feedbb3"}
Nov 21 14:18:34 crc kubenswrapper[4774]: I1121 14:18:34.528421 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" podStartSLOduration=1.5595845210000001 podStartE2EDuration="7.528389351s" podCreationTimestamp="2025-11-21 14:18:27 +0000 UTC" firstStartedPulling="2025-11-21 14:18:27.758062246 +0000 UTC m=+898.410261505" lastFinishedPulling="2025-11-21 14:18:33.726867076 +0000 UTC m=+904.379066335" observedRunningTime="2025-11-21 14:18:34.525195239 +0000 UTC m=+905.177394498" watchObservedRunningTime="2025-11-21 14:18:34.528389351 +0000 UTC m=+905.180588600"
Nov 21 14:18:35 crc kubenswrapper[4774]: I1121 14:18:35.514336 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5"
Nov 21 14:18:35 crc kubenswrapper[4774]: I1121 14:18:35.679419 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqnkm"]
Nov 21 14:18:35 crc kubenswrapper[4774]: I1121 14:18:35.879781 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t5slm"]
Nov 21 14:18:35 crc kubenswrapper[4774]: I1121 14:18:35.880137 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t5slm" podUID="9ba443c5-8c51-419d-b520-7da8a1df2159" containerName="registry-server" containerID="cri-o://f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa" gracePeriod=2
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.294879 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t5slm"
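The "Killing container with a grace period ... gracePeriod=2" entry above is the usual SIGTERM-then-SIGKILL escalation: the runtime signals the container, waits up to the grace period, and only then forces termination. A rough process-level sketch of that escalation (a local `sleep` stands in for the container; this is not the CRI call kubelet actually makes):

```go
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// killWithGrace sends SIGTERM, waits up to grace, then escalates to SIGKILL.
func killWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	_ = cmd.Process.Signal(syscall.SIGTERM)
	select {
	case <-done:
		fmt.Println("exited within the grace period")
	case <-time.After(grace):
		fmt.Println("grace period expired, sending SIGKILL")
		_ = cmd.Process.Kill()
		<-done
	}
}

func main() {
	cmd := exec.Command("sleep", "30") // stand-in for a container process
	_ = cmd.Start()
	killWithGrace(cmd, 2*time.Second) // gracePeriod=2, as in the log
}
```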
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.432953 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ba443c5-8c51-419d-b520-7da8a1df2159-utilities\") pod \"9ba443c5-8c51-419d-b520-7da8a1df2159\" (UID: \"9ba443c5-8c51-419d-b520-7da8a1df2159\") "
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.433040 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ba443c5-8c51-419d-b520-7da8a1df2159-catalog-content\") pod \"9ba443c5-8c51-419d-b520-7da8a1df2159\" (UID: \"9ba443c5-8c51-419d-b520-7da8a1df2159\") "
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.433188 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgqff\" (UniqueName: \"kubernetes.io/projected/9ba443c5-8c51-419d-b520-7da8a1df2159-kube-api-access-qgqff\") pod \"9ba443c5-8c51-419d-b520-7da8a1df2159\" (UID: \"9ba443c5-8c51-419d-b520-7da8a1df2159\") "
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.434758 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ba443c5-8c51-419d-b520-7da8a1df2159-utilities" (OuterVolumeSpecName: "utilities") pod "9ba443c5-8c51-419d-b520-7da8a1df2159" (UID: "9ba443c5-8c51-419d-b520-7da8a1df2159"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.449702 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ba443c5-8c51-419d-b520-7da8a1df2159-kube-api-access-qgqff" (OuterVolumeSpecName: "kube-api-access-qgqff") pod "9ba443c5-8c51-419d-b520-7da8a1df2159" (UID: "9ba443c5-8c51-419d-b520-7da8a1df2159"). InnerVolumeSpecName "kube-api-access-qgqff". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.485124 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ba443c5-8c51-419d-b520-7da8a1df2159-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ba443c5-8c51-419d-b520-7da8a1df2159" (UID: "9ba443c5-8c51-419d-b520-7da8a1df2159"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.524236 4774 generic.go:334] "Generic (PLEG): container finished" podID="9ba443c5-8c51-419d-b520-7da8a1df2159" containerID="f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa" exitCode=0 Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.524355 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5slm" event={"ID":"9ba443c5-8c51-419d-b520-7da8a1df2159","Type":"ContainerDied","Data":"f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa"} Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.524418 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5slm" event={"ID":"9ba443c5-8c51-419d-b520-7da8a1df2159","Type":"ContainerDied","Data":"777398dc3bcf67f7c887bc421110538cf92c908dc9ff12171c4077ee38362720"} Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.524440 4774 scope.go:117] "RemoveContainer" containerID="f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa" Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.524434 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t5slm" Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.524743 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nqnkm" podUID="0df44fe7-135d-4fdb-8b90-33a7e30b16cc" containerName="registry-server" containerID="cri-o://7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2" gracePeriod=2 Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.536001 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgqff\" (UniqueName: \"kubernetes.io/projected/9ba443c5-8c51-419d-b520-7da8a1df2159-kube-api-access-qgqff\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.536042 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ba443c5-8c51-419d-b520-7da8a1df2159-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.536054 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ba443c5-8c51-419d-b520-7da8a1df2159-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.547797 4774 scope.go:117] "RemoveContainer" containerID="66ade80c15636e4280bb2e75ecec92e50c5a309d863415f472b831f7976cf4c0" Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.562690 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t5slm"] Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.565959 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t5slm"] Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.583814 4774 scope.go:117] "RemoveContainer" containerID="ef3f75898fd9d58897366fab1427c352abcdf893119205d89b31833c69c2abc1" Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.609834 4774 scope.go:117] "RemoveContainer" containerID="f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa" Nov 21 14:18:36 crc kubenswrapper[4774]: E1121 14:18:36.610445 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound 
desc = could not find container \"f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa\": container with ID starting with f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa not found: ID does not exist" containerID="f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa"
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.610563 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa"} err="failed to get container status \"f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa\": rpc error: code = NotFound desc = could not find container \"f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa\": container with ID starting with f7040d1625990ea9d5f225cc39b1ada21f4502040187fbde56b5a7764432abfa not found: ID does not exist"
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.610622 4774 scope.go:117] "RemoveContainer" containerID="66ade80c15636e4280bb2e75ecec92e50c5a309d863415f472b831f7976cf4c0"
Nov 21 14:18:36 crc kubenswrapper[4774]: E1121 14:18:36.611082 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66ade80c15636e4280bb2e75ecec92e50c5a309d863415f472b831f7976cf4c0\": container with ID starting with 66ade80c15636e4280bb2e75ecec92e50c5a309d863415f472b831f7976cf4c0 not found: ID does not exist" containerID="66ade80c15636e4280bb2e75ecec92e50c5a309d863415f472b831f7976cf4c0"
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.611118 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66ade80c15636e4280bb2e75ecec92e50c5a309d863415f472b831f7976cf4c0"} err="failed to get container status \"66ade80c15636e4280bb2e75ecec92e50c5a309d863415f472b831f7976cf4c0\": rpc error: code = NotFound desc = could not find container \"66ade80c15636e4280bb2e75ecec92e50c5a309d863415f472b831f7976cf4c0\": container with ID starting with 66ade80c15636e4280bb2e75ecec92e50c5a309d863415f472b831f7976cf4c0 not found: ID does not exist"
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.611137 4774 scope.go:117] "RemoveContainer" containerID="ef3f75898fd9d58897366fab1427c352abcdf893119205d89b31833c69c2abc1"
Nov 21 14:18:36 crc kubenswrapper[4774]: E1121 14:18:36.611432 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef3f75898fd9d58897366fab1427c352abcdf893119205d89b31833c69c2abc1\": container with ID starting with ef3f75898fd9d58897366fab1427c352abcdf893119205d89b31833c69c2abc1 not found: ID does not exist" containerID="ef3f75898fd9d58897366fab1427c352abcdf893119205d89b31833c69c2abc1"
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.611468 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef3f75898fd9d58897366fab1427c352abcdf893119205d89b31833c69c2abc1"} err="failed to get container status \"ef3f75898fd9d58897366fab1427c352abcdf893119205d89b31833c69c2abc1\": rpc error: code = NotFound desc = could not find container \"ef3f75898fd9d58897366fab1427c352abcdf893119205d89b31833c69c2abc1\": container with ID starting with ef3f75898fd9d58897366fab1427c352abcdf893119205d89b31833c69c2abc1 not found: ID does not exist"
Nov 21 14:18:36 crc kubenswrapper[4774]: I1121 14:18:36.939050 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqnkm"
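The NotFound errors above are benign: by the time scope.go retries RemoveContainer, the container is already gone, so pod_container_deletor logs the error and carries on, treating the delete as complete. A small sketch of that idempotent-delete convention, with an in-memory map standing in for the runtime:

```go
package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("code = NotFound: ID does not exist")

// removeContainer is a stand-in for the CRI RemoveContainer call.
func removeContainer(store map[string]bool, id string) error {
	if !store[id] {
		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
	}
	delete(store, id)
	return nil
}

// deleteContainer treats NotFound as success: the container being already
// gone is exactly the state the delete was trying to reach.
func deleteContainer(store map[string]bool, id string) {
	if err := removeContainer(store, id); err != nil {
		if errors.Is(err, errNotFound) {
			fmt.Printf("DeleteContainer returned error %v (ignored: already removed)\n", err)
			return
		}
		fmt.Println("real failure:", err)
	}
}

func main() {
	store := map[string]bool{}
	deleteContainer(store, "f7040d16") // already gone: logged, then ignored
}
```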
Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.049497 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kp75k\" (UniqueName: \"kubernetes.io/projected/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-kube-api-access-kp75k\") pod \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\" (UID: \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\") "
Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.049638 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-catalog-content\") pod \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\" (UID: \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\") "
Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.049677 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-utilities\") pod \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\" (UID: \"0df44fe7-135d-4fdb-8b90-33a7e30b16cc\") "
Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.050759 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-utilities" (OuterVolumeSpecName: "utilities") pod "0df44fe7-135d-4fdb-8b90-33a7e30b16cc" (UID: "0df44fe7-135d-4fdb-8b90-33a7e30b16cc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.053789 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-kube-api-access-kp75k" (OuterVolumeSpecName: "kube-api-access-kp75k") pod "0df44fe7-135d-4fdb-8b90-33a7e30b16cc" (UID: "0df44fe7-135d-4fdb-8b90-33a7e30b16cc"). InnerVolumeSpecName "kube-api-access-kp75k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.068156 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0df44fe7-135d-4fdb-8b90-33a7e30b16cc" (UID: "0df44fe7-135d-4fdb-8b90-33a7e30b16cc"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.152360 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kp75k\" (UniqueName: \"kubernetes.io/projected/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-kube-api-access-kp75k\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.152495 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.152508 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0df44fe7-135d-4fdb-8b90-33a7e30b16cc-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.534222 4774 generic.go:334] "Generic (PLEG): container finished" podID="0df44fe7-135d-4fdb-8b90-33a7e30b16cc" containerID="7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2" exitCode=0 Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.534285 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqnkm" event={"ID":"0df44fe7-135d-4fdb-8b90-33a7e30b16cc","Type":"ContainerDied","Data":"7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2"} Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.534326 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nqnkm" event={"ID":"0df44fe7-135d-4fdb-8b90-33a7e30b16cc","Type":"ContainerDied","Data":"63aa9cf5e5f6036265424c5318dc473a9edc0ca47f0938ba37bbbeb619bb70a7"} Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.534355 4774 scope.go:117] "RemoveContainer" containerID="7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.534476 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nqnkm" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.568295 4774 scope.go:117] "RemoveContainer" containerID="0e5edd4ae252bc4f61d752de4605c2567eed792f91fb956cdf7378c074f97559" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.569282 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqnkm"] Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.575354 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nqnkm"] Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.589522 4774 scope.go:117] "RemoveContainer" containerID="713de82ef657033f5252b0c876e5bf008b9dc135fd2253f9754e5ffd2c466281" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.610291 4774 scope.go:117] "RemoveContainer" containerID="7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2" Nov 21 14:18:37 crc kubenswrapper[4774]: E1121 14:18:37.611015 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2\": container with ID starting with 7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2 not found: ID does not exist" containerID="7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.611081 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2"} err="failed to get container status \"7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2\": rpc error: code = NotFound desc = could not find container \"7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2\": container with ID starting with 7c67a0ba47900c57cc8158742b139c395b39730d5f735d45b03482e2769253c2 not found: ID does not exist" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.611117 4774 scope.go:117] "RemoveContainer" containerID="0e5edd4ae252bc4f61d752de4605c2567eed792f91fb956cdf7378c074f97559" Nov 21 14:18:37 crc kubenswrapper[4774]: E1121 14:18:37.611514 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e5edd4ae252bc4f61d752de4605c2567eed792f91fb956cdf7378c074f97559\": container with ID starting with 0e5edd4ae252bc4f61d752de4605c2567eed792f91fb956cdf7378c074f97559 not found: ID does not exist" containerID="0e5edd4ae252bc4f61d752de4605c2567eed792f91fb956cdf7378c074f97559" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.611572 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e5edd4ae252bc4f61d752de4605c2567eed792f91fb956cdf7378c074f97559"} err="failed to get container status \"0e5edd4ae252bc4f61d752de4605c2567eed792f91fb956cdf7378c074f97559\": rpc error: code = NotFound desc = could not find container \"0e5edd4ae252bc4f61d752de4605c2567eed792f91fb956cdf7378c074f97559\": container with ID starting with 0e5edd4ae252bc4f61d752de4605c2567eed792f91fb956cdf7378c074f97559 not found: ID does not exist" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.611649 4774 scope.go:117] "RemoveContainer" containerID="713de82ef657033f5252b0c876e5bf008b9dc135fd2253f9754e5ffd2c466281" Nov 21 14:18:37 crc kubenswrapper[4774]: E1121 14:18:37.612000 4774 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"713de82ef657033f5252b0c876e5bf008b9dc135fd2253f9754e5ffd2c466281\": container with ID starting with 713de82ef657033f5252b0c876e5bf008b9dc135fd2253f9754e5ffd2c466281 not found: ID does not exist" containerID="713de82ef657033f5252b0c876e5bf008b9dc135fd2253f9754e5ffd2c466281" Nov 21 14:18:37 crc kubenswrapper[4774]: I1121 14:18:37.612023 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"713de82ef657033f5252b0c876e5bf008b9dc135fd2253f9754e5ffd2c466281"} err="failed to get container status \"713de82ef657033f5252b0c876e5bf008b9dc135fd2253f9754e5ffd2c466281\": rpc error: code = NotFound desc = could not find container \"713de82ef657033f5252b0c876e5bf008b9dc135fd2253f9754e5ffd2c466281\": container with ID starting with 713de82ef657033f5252b0c876e5bf008b9dc135fd2253f9754e5ffd2c466281 not found: ID does not exist" Nov 21 14:18:38 crc kubenswrapper[4774]: I1121 14:18:38.100499 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0df44fe7-135d-4fdb-8b90-33a7e30b16cc" path="/var/lib/kubelet/pods/0df44fe7-135d-4fdb-8b90-33a7e30b16cc/volumes" Nov 21 14:18:38 crc kubenswrapper[4774]: I1121 14:18:38.101408 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ba443c5-8c51-419d-b520-7da8a1df2159" path="/var/lib/kubelet/pods/9ba443c5-8c51-419d-b520-7da8a1df2159/volumes" Nov 21 14:18:47 crc kubenswrapper[4774]: I1121 14:18:47.455364 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-944c54d44-trkz5" Nov 21 14:18:59 crc kubenswrapper[4774]: I1121 14:18:59.600959 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:18:59 crc kubenswrapper[4774]: I1121 14:18:59.601961 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.193907 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-6b9b569fbd-zv6fk" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.885544 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-vxqzf"] Nov 21 14:19:07 crc kubenswrapper[4774]: E1121 14:19:07.886154 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ba443c5-8c51-419d-b520-7da8a1df2159" containerName="registry-server" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.886181 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ba443c5-8c51-419d-b520-7da8a1df2159" containerName="registry-server" Nov 21 14:19:07 crc kubenswrapper[4774]: E1121 14:19:07.886197 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0df44fe7-135d-4fdb-8b90-33a7e30b16cc" containerName="extract-utilities" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.886206 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0df44fe7-135d-4fdb-8b90-33a7e30b16cc" containerName="extract-utilities" 
Nov 21 14:19:07 crc kubenswrapper[4774]: E1121 14:19:07.886215 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ba443c5-8c51-419d-b520-7da8a1df2159" containerName="extract-content" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.886222 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ba443c5-8c51-419d-b520-7da8a1df2159" containerName="extract-content" Nov 21 14:19:07 crc kubenswrapper[4774]: E1121 14:19:07.886237 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ba443c5-8c51-419d-b520-7da8a1df2159" containerName="extract-utilities" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.886245 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ba443c5-8c51-419d-b520-7da8a1df2159" containerName="extract-utilities" Nov 21 14:19:07 crc kubenswrapper[4774]: E1121 14:19:07.886273 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0df44fe7-135d-4fdb-8b90-33a7e30b16cc" containerName="registry-server" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.886280 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0df44fe7-135d-4fdb-8b90-33a7e30b16cc" containerName="registry-server" Nov 21 14:19:07 crc kubenswrapper[4774]: E1121 14:19:07.886293 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0df44fe7-135d-4fdb-8b90-33a7e30b16cc" containerName="extract-content" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.886301 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0df44fe7-135d-4fdb-8b90-33a7e30b16cc" containerName="extract-content" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.886510 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ba443c5-8c51-419d-b520-7da8a1df2159" containerName="registry-server" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.886532 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="0df44fe7-135d-4fdb-8b90-33a7e30b16cc" containerName="registry-server" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.889449 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh"] Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.889664 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.890699 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.892391 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.892650 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-k2r5d" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.892931 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.903225 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh"] Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.904379 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.995150 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-gvjv6"] Nov 21 14:19:07 crc kubenswrapper[4774]: I1121 14:19:07.996491 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-gvjv6" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.000705 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.000922 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.001351 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.001384 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-xrw2n" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.019065 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6c7b4b5f48-575gd"] Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.019928 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-575gd" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.024106 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.026499 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-reloader\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.026548 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-metrics\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.026596 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-metrics-certs\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.026618 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-frr-conf\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.026647 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-frr-sockets\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.026673 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/88f1b63d-3ba8-494f-9331-1e50303360dd-cert\") pod \"frr-k8s-webhook-server-6998585d5-7mgxh\" (UID: \"88f1b63d-3ba8-494f-9331-1e50303360dd\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.026698 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gbq4\" (UniqueName: \"kubernetes.io/projected/88f1b63d-3ba8-494f-9331-1e50303360dd-kube-api-access-8gbq4\") pod \"frr-k8s-webhook-server-6998585d5-7mgxh\" (UID: \"88f1b63d-3ba8-494f-9331-1e50303360dd\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.026727 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-frr-startup\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.026755 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw76b\" (UniqueName: 
\"kubernetes.io/projected/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-kube-api-access-kw76b\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.043097 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-575gd"] Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128235 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-frr-startup\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128304 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw76b\" (UniqueName: \"kubernetes.io/projected/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-kube-api-access-kw76b\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128359 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-reloader\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128382 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ll9cg\" (UniqueName: \"kubernetes.io/projected/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-kube-api-access-ll9cg\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128408 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-metallb-excludel2\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128431 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-metrics\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128453 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-metrics-certs\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128487 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-metrics-certs\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128508 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: 
\"kubernetes.io/empty-dir/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-frr-conf\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128524 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-memberlist\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128540 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9-cert\") pod \"controller-6c7b4b5f48-575gd\" (UID: \"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9\") " pod="metallb-system/controller-6c7b4b5f48-575gd" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128562 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-frr-sockets\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128583 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9-metrics-certs\") pod \"controller-6c7b4b5f48-575gd\" (UID: \"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9\") " pod="metallb-system/controller-6c7b4b5f48-575gd" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128603 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/88f1b63d-3ba8-494f-9331-1e50303360dd-cert\") pod \"frr-k8s-webhook-server-6998585d5-7mgxh\" (UID: \"88f1b63d-3ba8-494f-9331-1e50303360dd\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128622 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb2l6\" (UniqueName: \"kubernetes.io/projected/b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9-kube-api-access-sb2l6\") pod \"controller-6c7b4b5f48-575gd\" (UID: \"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9\") " pod="metallb-system/controller-6c7b4b5f48-575gd" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.128642 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gbq4\" (UniqueName: \"kubernetes.io/projected/88f1b63d-3ba8-494f-9331-1e50303360dd-kube-api-access-8gbq4\") pod \"frr-k8s-webhook-server-6998585d5-7mgxh\" (UID: \"88f1b63d-3ba8-494f-9331-1e50303360dd\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.129360 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-reloader\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.129609 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-frr-startup\") pod 
\"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.129626 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-frr-sockets\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.129865 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-frr-conf\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.129981 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-metrics\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.137204 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/88f1b63d-3ba8-494f-9331-1e50303360dd-cert\") pod \"frr-k8s-webhook-server-6998585d5-7mgxh\" (UID: \"88f1b63d-3ba8-494f-9331-1e50303360dd\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.137298 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-metrics-certs\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.149281 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gbq4\" (UniqueName: \"kubernetes.io/projected/88f1b63d-3ba8-494f-9331-1e50303360dd-kube-api-access-8gbq4\") pod \"frr-k8s-webhook-server-6998585d5-7mgxh\" (UID: \"88f1b63d-3ba8-494f-9331-1e50303360dd\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.150524 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw76b\" (UniqueName: \"kubernetes.io/projected/a2e96c23-b50a-4c74-9bac-e46d87db1d0a-kube-api-access-kw76b\") pod \"frr-k8s-vxqzf\" (UID: \"a2e96c23-b50a-4c74-9bac-e46d87db1d0a\") " pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.216141 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.229638 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-memberlist\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.229705 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9-cert\") pod \"controller-6c7b4b5f48-575gd\" (UID: \"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9\") " pod="metallb-system/controller-6c7b4b5f48-575gd" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.229749 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9-metrics-certs\") pod \"controller-6c7b4b5f48-575gd\" (UID: \"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9\") " pod="metallb-system/controller-6c7b4b5f48-575gd" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.229776 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb2l6\" (UniqueName: \"kubernetes.io/projected/b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9-kube-api-access-sb2l6\") pod \"controller-6c7b4b5f48-575gd\" (UID: \"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9\") " pod="metallb-system/controller-6c7b4b5f48-575gd" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.229873 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ll9cg\" (UniqueName: \"kubernetes.io/projected/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-kube-api-access-ll9cg\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.229904 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-metallb-excludel2\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.229945 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-metrics-certs\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6" Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.229944 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" Nov 21 14:19:08 crc kubenswrapper[4774]: E1121 14:19:08.231249 4774 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 21 14:19:08 crc kubenswrapper[4774]: E1121 14:19:08.231319 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-memberlist podName:03b7ac83-7cfb-4adb-9460-e9dd4c2c911a nodeName:}" failed. No retries permitted until 2025-11-21 14:19:08.73129133 +0000 UTC m=+939.383490769 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-memberlist") pod "speaker-gvjv6" (UID: "03b7ac83-7cfb-4adb-9460-e9dd4c2c911a") : secret "metallb-memberlist" not found
Nov 21 14:19:08 crc kubenswrapper[4774]: E1121 14:19:08.231912 4774 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found
Nov 21 14:19:08 crc kubenswrapper[4774]: E1121 14:19:08.231963 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9-metrics-certs podName:b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9 nodeName:}" failed. No retries permitted until 2025-11-21 14:19:08.731948739 +0000 UTC m=+939.384148008 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9-metrics-certs") pod "controller-6c7b4b5f48-575gd" (UID: "b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9") : secret "controller-certs-secret" not found
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.233590 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-metallb-excludel2\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6"
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.238028 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-metrics-certs\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6"
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.240247 4774 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.247278 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9-cert\") pod \"controller-6c7b4b5f48-575gd\" (UID: \"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9\") " pod="metallb-system/controller-6c7b4b5f48-575gd"
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.264194 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ll9cg\" (UniqueName: \"kubernetes.io/projected/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-kube-api-access-ll9cg\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6"
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.277582 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb2l6\" (UniqueName: \"kubernetes.io/projected/b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9-kube-api-access-sb2l6\") pod \"controller-6c7b4b5f48-575gd\" (UID: \"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9\") " pod="metallb-system/controller-6c7b4b5f48-575gd"
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.473529 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh"]
Nov 21 14:19:08 crc kubenswrapper[4774]: W1121 14:19:08.479796 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88f1b63d_3ba8_494f_9331_1e50303360dd.slice/crio-653f090067eb655f484d268fc715bb49658a1d963b3abd8e7da76d6876cbe802 WatchSource:0}: Error finding container 653f090067eb655f484d268fc715bb49658a1d963b3abd8e7da76d6876cbe802: Status 404 returned error can't find the container with id 653f090067eb655f484d268fc715bb49658a1d963b3abd8e7da76d6876cbe802
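The "Failed to process watch event ... Status 404" warnings above are a startup race: cAdvisor sees the new crio-<id> cgroup appear before the runtime can report the container, so the lookup 404s and the event is dropped until a later housekeeping pass finds the container. A sketch of tolerating that race (all names here are illustrative, not cAdvisor's API):

```go
package main

import (
	"errors"
	"fmt"
)

var errStatus404 = errors.New("Status 404: can't find the container")

// lookup is a stand-in for asking the runtime about a container seen in a
// cgroup watch event; early in sandbox creation it can 404.
func lookup(known map[string]bool, id string) error {
	if !known[id] {
		return errStatus404
	}
	return nil
}

func processWatchEvent(known map[string]bool, id string) {
	if err := lookup(known, id); errors.Is(err, errStatus404) {
		// Log and drop the event; the periodic housekeeping pass will
		// pick the container up once the runtime has registered it.
		fmt.Printf("Failed to process watch event for %s: %v\n", id, err)
		return
	}
	fmt.Println("container tracked:", id)
}

func main() {
	known := map[string]bool{}
	processWatchEvent(known, "653f0900") // racing the runtime: logged, skipped
	known["653f0900"] = true
	processWatchEvent(known, "653f0900") // found on the next pass
}
```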
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.737417 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9-metrics-certs\") pod \"controller-6c7b4b5f48-575gd\" (UID: \"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9\") " pod="metallb-system/controller-6c7b4b5f48-575gd"
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.737561 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-memberlist\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6"
Nov 21 14:19:08 crc kubenswrapper[4774]: E1121 14:19:08.737743 4774 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Nov 21 14:19:08 crc kubenswrapper[4774]: E1121 14:19:08.737839 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-memberlist podName:03b7ac83-7cfb-4adb-9460-e9dd4c2c911a nodeName:}" failed. No retries permitted until 2025-11-21 14:19:09.737794594 +0000 UTC m=+940.389993843 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-memberlist") pod "speaker-gvjv6" (UID: "03b7ac83-7cfb-4adb-9460-e9dd4c2c911a") : secret "metallb-memberlist" not found
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.744005 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9-metrics-certs\") pod \"controller-6c7b4b5f48-575gd\" (UID: \"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9\") " pod="metallb-system/controller-6c7b4b5f48-575gd"
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.755190 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" event={"ID":"88f1b63d-3ba8-494f-9331-1e50303360dd","Type":"ContainerStarted","Data":"653f090067eb655f484d268fc715bb49658a1d963b3abd8e7da76d6876cbe802"}
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.756177 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vxqzf" event={"ID":"a2e96c23-b50a-4c74-9bac-e46d87db1d0a","Type":"ContainerStarted","Data":"3ba0e5dcbe03ab185ec2a60c40bf877b50d913d58e821e31abcc6159b1da44c6"}
Nov 21 14:19:08 crc kubenswrapper[4774]: I1121 14:19:08.938551 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-575gd"
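Note the retry spacing on the memberlist mount: durationBeforeRetry is 500ms on the first failure and 1s on the next, i.e. the per-volume operation backs off exponentially until the referenced Secret exists. A minimal version of that doubling, capped backoff (the initial value matches the log; the cap here is an assumption, not kubelet's actual limit):

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

// nextBackoff doubles the delay on each consecutive failure, up to a limit,
// matching the 500ms -> 1s progression visible in the log.
func nextBackoff(current, initial, limit time.Duration) time.Duration {
	if current == 0 {
		return initial
	}
	if d := current * 2; d < limit {
		return d
	}
	return limit
}

func main() {
	errSecretMissing := errors.New(`secret "metallb-memberlist" not found`)
	var delay time.Duration
	for attempt := 1; attempt <= 3; attempt++ {
		delay = nextBackoff(delay, 500*time.Millisecond, 2*time.Minute)
		fmt.Printf("attempt %d failed (%v); no retries permitted for %s\n",
			attempt, errSecretMissing, delay)
	}
}
```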
Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-575gd" Nov 21 14:19:09 crc kubenswrapper[4774]: I1121 14:19:09.212339 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-575gd"] Nov 21 14:19:09 crc kubenswrapper[4774]: W1121 14:19:09.214731 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb41e9d86_4f0e_4cee_b6f3_44b53a4a12a9.slice/crio-dd4b0a0dd20d878501d140aeedb2aa4f81ec2f84a72a467eb3a089c6c2d3a497 WatchSource:0}: Error finding container dd4b0a0dd20d878501d140aeedb2aa4f81ec2f84a72a467eb3a089c6c2d3a497: Status 404 returned error can't find the container with id dd4b0a0dd20d878501d140aeedb2aa4f81ec2f84a72a467eb3a089c6c2d3a497 Nov 21 14:19:09 crc kubenswrapper[4774]: I1121 14:19:09.751985 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-memberlist\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6" Nov 21 14:19:09 crc kubenswrapper[4774]: I1121 14:19:09.759537 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/03b7ac83-7cfb-4adb-9460-e9dd4c2c911a-memberlist\") pod \"speaker-gvjv6\" (UID: \"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a\") " pod="metallb-system/speaker-gvjv6" Nov 21 14:19:09 crc kubenswrapper[4774]: I1121 14:19:09.763956 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-575gd" event={"ID":"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9","Type":"ContainerStarted","Data":"251f53aff449904dcf8d78952d388afcff11d07208e7bf784abece7a1172e632"} Nov 21 14:19:09 crc kubenswrapper[4774]: I1121 14:19:09.764032 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-575gd" event={"ID":"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9","Type":"ContainerStarted","Data":"4fd9cc353df2e173fe6551f8e68c54987513a9877299ebbecb51a7b0b0997945"} Nov 21 14:19:09 crc kubenswrapper[4774]: I1121 14:19:09.764048 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-575gd" event={"ID":"b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9","Type":"ContainerStarted","Data":"dd4b0a0dd20d878501d140aeedb2aa4f81ec2f84a72a467eb3a089c6c2d3a497"} Nov 21 14:19:09 crc kubenswrapper[4774]: I1121 14:19:09.765317 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6c7b4b5f48-575gd" Nov 21 14:19:09 crc kubenswrapper[4774]: I1121 14:19:09.785562 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6c7b4b5f48-575gd" podStartSLOduration=2.785534664 podStartE2EDuration="2.785534664s" podCreationTimestamp="2025-11-21 14:19:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:19:09.78157697 +0000 UTC m=+940.433776239" watchObservedRunningTime="2025-11-21 14:19:09.785534664 +0000 UTC m=+940.437733923" Nov 21 14:19:09 crc kubenswrapper[4774]: I1121 14:19:09.812957 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-gvjv6" Nov 21 14:19:09 crc kubenswrapper[4774]: W1121 14:19:09.841874 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03b7ac83_7cfb_4adb_9460_e9dd4c2c911a.slice/crio-0613a3b117aacf5e1124554243ea5b3b7bd6cc6c456b616c8d20a5a6133a195c WatchSource:0}: Error finding container 0613a3b117aacf5e1124554243ea5b3b7bd6cc6c456b616c8d20a5a6133a195c: Status 404 returned error can't find the container with id 0613a3b117aacf5e1124554243ea5b3b7bd6cc6c456b616c8d20a5a6133a195c Nov 21 14:19:10 crc kubenswrapper[4774]: I1121 14:19:10.773313 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-gvjv6" event={"ID":"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a","Type":"ContainerStarted","Data":"45b7b25a3e24fbfb697a1a07b665ea6cd469498e2de2c6117a7a153757dc3fd9"} Nov 21 14:19:10 crc kubenswrapper[4774]: I1121 14:19:10.773699 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-gvjv6" event={"ID":"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a","Type":"ContainerStarted","Data":"80a84b57a37b9d5b71cfa0d81f1fd218546161310950cd781bc121ee1858e9b0"} Nov 21 14:19:10 crc kubenswrapper[4774]: I1121 14:19:10.773717 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-gvjv6" event={"ID":"03b7ac83-7cfb-4adb-9460-e9dd4c2c911a","Type":"ContainerStarted","Data":"0613a3b117aacf5e1124554243ea5b3b7bd6cc6c456b616c8d20a5a6133a195c"} Nov 21 14:19:10 crc kubenswrapper[4774]: I1121 14:19:10.774076 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-gvjv6" Nov 21 14:19:10 crc kubenswrapper[4774]: I1121 14:19:10.816125 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-gvjv6" podStartSLOduration=3.816092312 podStartE2EDuration="3.816092312s" podCreationTimestamp="2025-11-21 14:19:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:19:10.814794665 +0000 UTC m=+941.466993934" watchObservedRunningTime="2025-11-21 14:19:10.816092312 +0000 UTC m=+941.468291571" Nov 21 14:19:16 crc kubenswrapper[4774]: I1121 14:19:16.844001 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" event={"ID":"88f1b63d-3ba8-494f-9331-1e50303360dd","Type":"ContainerStarted","Data":"9effea95f828d6f3f6088d8594dfb5d94cd9d383b7a8c9b1ca12b23b04ba5b03"} Nov 21 14:19:16 crc kubenswrapper[4774]: I1121 14:19:16.844987 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" Nov 21 14:19:16 crc kubenswrapper[4774]: I1121 14:19:16.846187 4774 generic.go:334] "Generic (PLEG): container finished" podID="a2e96c23-b50a-4c74-9bac-e46d87db1d0a" containerID="35c19b835e643e8accd0e7b1c0cbd1ce4da66b75d41c50752c520cbd91d67464" exitCode=0 Nov 21 14:19:16 crc kubenswrapper[4774]: I1121 14:19:16.846224 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vxqzf" event={"ID":"a2e96c23-b50a-4c74-9bac-e46d87db1d0a","Type":"ContainerDied","Data":"35c19b835e643e8accd0e7b1c0cbd1ce4da66b75d41c50752c520cbd91d67464"} Nov 21 14:19:16 crc kubenswrapper[4774]: I1121 14:19:16.864496 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" podStartSLOduration=2.035593565 
podStartE2EDuration="9.864463959s" podCreationTimestamp="2025-11-21 14:19:07 +0000 UTC" firstStartedPulling="2025-11-21 14:19:08.482127518 +0000 UTC m=+939.134326777" lastFinishedPulling="2025-11-21 14:19:16.310997912 +0000 UTC m=+946.963197171" observedRunningTime="2025-11-21 14:19:16.861328069 +0000 UTC m=+947.513527338" watchObservedRunningTime="2025-11-21 14:19:16.864463959 +0000 UTC m=+947.516663218" Nov 21 14:19:17 crc kubenswrapper[4774]: I1121 14:19:17.861923 4774 generic.go:334] "Generic (PLEG): container finished" podID="a2e96c23-b50a-4c74-9bac-e46d87db1d0a" containerID="2f2c67d3fe6ca13f62428b12cfbe19b14b8156f8caf336e546af76ddeac06e98" exitCode=0 Nov 21 14:19:17 crc kubenswrapper[4774]: I1121 14:19:17.862010 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vxqzf" event={"ID":"a2e96c23-b50a-4c74-9bac-e46d87db1d0a","Type":"ContainerDied","Data":"2f2c67d3fe6ca13f62428b12cfbe19b14b8156f8caf336e546af76ddeac06e98"} Nov 21 14:19:18 crc kubenswrapper[4774]: I1121 14:19:18.879708 4774 generic.go:334] "Generic (PLEG): container finished" podID="a2e96c23-b50a-4c74-9bac-e46d87db1d0a" containerID="45dabb21907da54e5880c38948f34628dec888020544208c5badbf43b83d94e6" exitCode=0 Nov 21 14:19:18 crc kubenswrapper[4774]: I1121 14:19:18.879781 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vxqzf" event={"ID":"a2e96c23-b50a-4c74-9bac-e46d87db1d0a","Type":"ContainerDied","Data":"45dabb21907da54e5880c38948f34628dec888020544208c5badbf43b83d94e6"} Nov 21 14:19:19 crc kubenswrapper[4774]: I1121 14:19:19.906398 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vxqzf" event={"ID":"a2e96c23-b50a-4c74-9bac-e46d87db1d0a","Type":"ContainerStarted","Data":"d512049a41ca1a3ca5e36cf95ceac59c02fa475158e20d578edbe8eb9c8a713f"} Nov 21 14:19:19 crc kubenswrapper[4774]: I1121 14:19:19.906951 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vxqzf" event={"ID":"a2e96c23-b50a-4c74-9bac-e46d87db1d0a","Type":"ContainerStarted","Data":"491ae4e1331bb19dd2c765fa43f62b702c53d302115d3a60003c4a41c7bfb6ab"} Nov 21 14:19:19 crc kubenswrapper[4774]: I1121 14:19:19.906964 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vxqzf" event={"ID":"a2e96c23-b50a-4c74-9bac-e46d87db1d0a","Type":"ContainerStarted","Data":"631bae2d200c8963dce97288834bc682e9e4f2ab27e956a5e8828b6c254a05d7"} Nov 21 14:19:19 crc kubenswrapper[4774]: I1121 14:19:19.906974 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vxqzf" event={"ID":"a2e96c23-b50a-4c74-9bac-e46d87db1d0a","Type":"ContainerStarted","Data":"fcd69d63430ee5d313799453fd2907d3b863fb1196751bb5ce940f2bc9906f37"} Nov 21 14:19:19 crc kubenswrapper[4774]: I1121 14:19:19.906983 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vxqzf" event={"ID":"a2e96c23-b50a-4c74-9bac-e46d87db1d0a","Type":"ContainerStarted","Data":"2f0f93d49f7981b4b2374ab3bd7d9e01e5206f628fcb8e9464e0795cba4b4018"} Nov 21 14:19:20 crc kubenswrapper[4774]: I1121 14:19:20.919736 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-vxqzf" event={"ID":"a2e96c23-b50a-4c74-9bac-e46d87db1d0a","Type":"ContainerStarted","Data":"548137e35f8d199dcff6b0fca3d309769bcfa0e3741764c21367c69cbf756da3"} Nov 21 14:19:20 crc kubenswrapper[4774]: I1121 14:19:20.920258 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:20 crc 
kubenswrapper[4774]: I1121 14:19:20.950253 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-vxqzf" podStartSLOduration=6.0786088209999996 podStartE2EDuration="13.950232019s" podCreationTimestamp="2025-11-21 14:19:07 +0000 UTC" firstStartedPulling="2025-11-21 14:19:08.42489964 +0000 UTC m=+939.077098889" lastFinishedPulling="2025-11-21 14:19:16.296522828 +0000 UTC m=+946.948722087" observedRunningTime="2025-11-21 14:19:20.948054476 +0000 UTC m=+951.600253735" watchObservedRunningTime="2025-11-21 14:19:20.950232019 +0000 UTC m=+951.602431278" Nov 21 14:19:23 crc kubenswrapper[4774]: I1121 14:19:23.216686 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:23 crc kubenswrapper[4774]: I1121 14:19:23.258694 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:28 crc kubenswrapper[4774]: I1121 14:19:28.239451 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-6998585d5-7mgxh" Nov 21 14:19:28 crc kubenswrapper[4774]: I1121 14:19:28.946789 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6c7b4b5f48-575gd" Nov 21 14:19:29 crc kubenswrapper[4774]: I1121 14:19:29.601385 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:19:29 crc kubenswrapper[4774]: I1121 14:19:29.601484 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:19:29 crc kubenswrapper[4774]: I1121 14:19:29.601556 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:19:29 crc kubenswrapper[4774]: I1121 14:19:29.602520 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"60358f57ea897b7d0cc072aaadbd84c8627ffc28289a543329a6b20ec347a65d"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 14:19:29 crc kubenswrapper[4774]: I1121 14:19:29.602620 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://60358f57ea897b7d0cc072aaadbd84c8627ffc28289a543329a6b20ec347a65d" gracePeriod=600 Nov 21 14:19:29 crc kubenswrapper[4774]: I1121 14:19:29.818843 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-gvjv6" Nov 21 14:19:30 crc kubenswrapper[4774]: I1121 14:19:29.999978 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="60358f57ea897b7d0cc072aaadbd84c8627ffc28289a543329a6b20ec347a65d" exitCode=0 Nov 21 14:19:30 
crc kubenswrapper[4774]: I1121 14:19:30.000038 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"60358f57ea897b7d0cc072aaadbd84c8627ffc28289a543329a6b20ec347a65d"} Nov 21 14:19:30 crc kubenswrapper[4774]: I1121 14:19:30.000087 4774 scope.go:117] "RemoveContainer" containerID="bb913418006b884e0ea3b932253e39752b8814882b052669be5898cc2a7736b4" Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.156131 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94"] Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.158516 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.160674 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.169208 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94"] Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.245384 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94\" (UID: \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.245457 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfsln\" (UniqueName: \"kubernetes.io/projected/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-kube-api-access-zfsln\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94\" (UID: \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.245519 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94\" (UID: \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.347596 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94\" (UID: \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.347705 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94\" (UID: 
\"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.347750 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfsln\" (UniqueName: \"kubernetes.io/projected/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-kube-api-access-zfsln\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94\" (UID: \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.348414 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94\" (UID: \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.550398 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94\" (UID: \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.559698 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfsln\" (UniqueName: \"kubernetes.io/projected/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-kube-api-access-zfsln\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94\" (UID: \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:31 crc kubenswrapper[4774]: I1121 14:19:31.799217 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:32 crc kubenswrapper[4774]: I1121 14:19:32.037326 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"3354716a800c28fc56d313636d4868697077dddaabfb1fc36da33f6ee413381b"} Nov 21 14:19:32 crc kubenswrapper[4774]: I1121 14:19:32.326462 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94"] Nov 21 14:19:33 crc kubenswrapper[4774]: I1121 14:19:33.046400 4774 generic.go:334] "Generic (PLEG): container finished" podID="a87574a6-adb5-41d6-935f-5f1e7ce90e7f" containerID="fc6be1f3464539246de7aca0da504e912ffcb285bafecd2ffa0d89e98209565d" exitCode=0 Nov 21 14:19:33 crc kubenswrapper[4774]: I1121 14:19:33.046510 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" event={"ID":"a87574a6-adb5-41d6-935f-5f1e7ce90e7f","Type":"ContainerDied","Data":"fc6be1f3464539246de7aca0da504e912ffcb285bafecd2ffa0d89e98209565d"} Nov 21 14:19:33 crc kubenswrapper[4774]: I1121 14:19:33.046898 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" event={"ID":"a87574a6-adb5-41d6-935f-5f1e7ce90e7f","Type":"ContainerStarted","Data":"2f253c541cc8a3473486b415ee7cf141c7f097e79336644ec12758c8db810eab"} Nov 21 14:19:36 crc kubenswrapper[4774]: I1121 14:19:36.069774 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" event={"ID":"a87574a6-adb5-41d6-935f-5f1e7ce90e7f","Type":"ContainerStarted","Data":"8df74f4bb27fb915a4788e86086e871976980ae8a7c4303ae6ea527f2067de7b"} Nov 21 14:19:37 crc kubenswrapper[4774]: I1121 14:19:37.080560 4774 generic.go:334] "Generic (PLEG): container finished" podID="a87574a6-adb5-41d6-935f-5f1e7ce90e7f" containerID="8df74f4bb27fb915a4788e86086e871976980ae8a7c4303ae6ea527f2067de7b" exitCode=0 Nov 21 14:19:37 crc kubenswrapper[4774]: I1121 14:19:37.080629 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" event={"ID":"a87574a6-adb5-41d6-935f-5f1e7ce90e7f","Type":"ContainerDied","Data":"8df74f4bb27fb915a4788e86086e871976980ae8a7c4303ae6ea527f2067de7b"} Nov 21 14:19:38 crc kubenswrapper[4774]: I1121 14:19:38.093214 4774 generic.go:334] "Generic (PLEG): container finished" podID="a87574a6-adb5-41d6-935f-5f1e7ce90e7f" containerID="f847185d3329b87e214383816f2f3ff7d9734eb75b0ad967e7669e67bd6d7f45" exitCode=0 Nov 21 14:19:38 crc kubenswrapper[4774]: I1121 14:19:38.104800 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" event={"ID":"a87574a6-adb5-41d6-935f-5f1e7ce90e7f","Type":"ContainerDied","Data":"f847185d3329b87e214383816f2f3ff7d9734eb75b0ad967e7669e67bd6d7f45"} Nov 21 14:19:38 crc kubenswrapper[4774]: I1121 14:19:38.220379 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-vxqzf" Nov 21 14:19:39 crc kubenswrapper[4774]: I1121 14:19:39.402972 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:39 crc kubenswrapper[4774]: I1121 14:19:39.479552 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-bundle\") pod \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\" (UID: \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\") " Nov 21 14:19:39 crc kubenswrapper[4774]: I1121 14:19:39.479786 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-util\") pod \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\" (UID: \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\") " Nov 21 14:19:39 crc kubenswrapper[4774]: I1121 14:19:39.479887 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfsln\" (UniqueName: \"kubernetes.io/projected/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-kube-api-access-zfsln\") pod \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\" (UID: \"a87574a6-adb5-41d6-935f-5f1e7ce90e7f\") " Nov 21 14:19:39 crc kubenswrapper[4774]: I1121 14:19:39.482601 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-bundle" (OuterVolumeSpecName: "bundle") pod "a87574a6-adb5-41d6-935f-5f1e7ce90e7f" (UID: "a87574a6-adb5-41d6-935f-5f1e7ce90e7f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:19:39 crc kubenswrapper[4774]: I1121 14:19:39.488468 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-kube-api-access-zfsln" (OuterVolumeSpecName: "kube-api-access-zfsln") pod "a87574a6-adb5-41d6-935f-5f1e7ce90e7f" (UID: "a87574a6-adb5-41d6-935f-5f1e7ce90e7f"). InnerVolumeSpecName "kube-api-access-zfsln". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:19:39 crc kubenswrapper[4774]: I1121 14:19:39.491454 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-util" (OuterVolumeSpecName: "util") pod "a87574a6-adb5-41d6-935f-5f1e7ce90e7f" (UID: "a87574a6-adb5-41d6-935f-5f1e7ce90e7f"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:19:39 crc kubenswrapper[4774]: I1121 14:19:39.581679 4774 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-util\") on node \"crc\" DevicePath \"\"" Nov 21 14:19:39 crc kubenswrapper[4774]: I1121 14:19:39.581740 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfsln\" (UniqueName: \"kubernetes.io/projected/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-kube-api-access-zfsln\") on node \"crc\" DevicePath \"\"" Nov 21 14:19:39 crc kubenswrapper[4774]: I1121 14:19:39.581755 4774 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a87574a6-adb5-41d6-935f-5f1e7ce90e7f-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:19:40 crc kubenswrapper[4774]: I1121 14:19:40.116960 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" event={"ID":"a87574a6-adb5-41d6-935f-5f1e7ce90e7f","Type":"ContainerDied","Data":"2f253c541cc8a3473486b415ee7cf141c7f097e79336644ec12758c8db810eab"} Nov 21 14:19:40 crc kubenswrapper[4774]: I1121 14:19:40.117041 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f253c541cc8a3473486b415ee7cf141c7f097e79336644ec12758c8db810eab" Nov 21 14:19:40 crc kubenswrapper[4774]: I1121 14:19:40.117038 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94" Nov 21 14:19:43 crc kubenswrapper[4774]: I1121 14:19:43.959887 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr"] Nov 21 14:19:43 crc kubenswrapper[4774]: E1121 14:19:43.962784 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a87574a6-adb5-41d6-935f-5f1e7ce90e7f" containerName="pull" Nov 21 14:19:43 crc kubenswrapper[4774]: I1121 14:19:43.962915 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a87574a6-adb5-41d6-935f-5f1e7ce90e7f" containerName="pull" Nov 21 14:19:43 crc kubenswrapper[4774]: E1121 14:19:43.962998 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a87574a6-adb5-41d6-935f-5f1e7ce90e7f" containerName="util" Nov 21 14:19:43 crc kubenswrapper[4774]: I1121 14:19:43.963078 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a87574a6-adb5-41d6-935f-5f1e7ce90e7f" containerName="util" Nov 21 14:19:43 crc kubenswrapper[4774]: E1121 14:19:43.963152 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a87574a6-adb5-41d6-935f-5f1e7ce90e7f" containerName="extract" Nov 21 14:19:43 crc kubenswrapper[4774]: I1121 14:19:43.963221 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a87574a6-adb5-41d6-935f-5f1e7ce90e7f" containerName="extract" Nov 21 14:19:43 crc kubenswrapper[4774]: I1121 14:19:43.963467 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a87574a6-adb5-41d6-935f-5f1e7ce90e7f" containerName="extract" Nov 21 14:19:43 crc kubenswrapper[4774]: I1121 14:19:43.964350 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr" Nov 21 14:19:43 crc kubenswrapper[4774]: I1121 14:19:43.966957 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Nov 21 14:19:43 crc kubenswrapper[4774]: I1121 14:19:43.967424 4774 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-gjvtw" Nov 21 14:19:43 crc kubenswrapper[4774]: I1121 14:19:43.967603 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Nov 21 14:19:43 crc kubenswrapper[4774]: I1121 14:19:43.980123 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr"] Nov 21 14:19:44 crc kubenswrapper[4774]: I1121 14:19:44.050747 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e43d640d-4b47-42af-9a50-dd2916939d35-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-jzxwr\" (UID: \"e43d640d-4b47-42af-9a50-dd2916939d35\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr" Nov 21 14:19:44 crc kubenswrapper[4774]: I1121 14:19:44.051230 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2tr7\" (UniqueName: \"kubernetes.io/projected/e43d640d-4b47-42af-9a50-dd2916939d35-kube-api-access-z2tr7\") pod \"cert-manager-operator-controller-manager-64cf6dff88-jzxwr\" (UID: \"e43d640d-4b47-42af-9a50-dd2916939d35\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr" Nov 21 14:19:44 crc kubenswrapper[4774]: I1121 14:19:44.153193 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e43d640d-4b47-42af-9a50-dd2916939d35-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-jzxwr\" (UID: \"e43d640d-4b47-42af-9a50-dd2916939d35\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr" Nov 21 14:19:44 crc kubenswrapper[4774]: I1121 14:19:44.153646 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2tr7\" (UniqueName: \"kubernetes.io/projected/e43d640d-4b47-42af-9a50-dd2916939d35-kube-api-access-z2tr7\") pod \"cert-manager-operator-controller-manager-64cf6dff88-jzxwr\" (UID: \"e43d640d-4b47-42af-9a50-dd2916939d35\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr" Nov 21 14:19:44 crc kubenswrapper[4774]: I1121 14:19:44.153851 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e43d640d-4b47-42af-9a50-dd2916939d35-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-jzxwr\" (UID: \"e43d640d-4b47-42af-9a50-dd2916939d35\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr" Nov 21 14:19:44 crc kubenswrapper[4774]: I1121 14:19:44.176307 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2tr7\" (UniqueName: \"kubernetes.io/projected/e43d640d-4b47-42af-9a50-dd2916939d35-kube-api-access-z2tr7\") pod \"cert-manager-operator-controller-manager-64cf6dff88-jzxwr\" (UID: \"e43d640d-4b47-42af-9a50-dd2916939d35\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr" Nov 21 14:19:44 crc kubenswrapper[4774]: I1121 14:19:44.287972 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr" Nov 21 14:19:44 crc kubenswrapper[4774]: I1121 14:19:44.570379 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr"] Nov 21 14:19:45 crc kubenswrapper[4774]: I1121 14:19:45.148477 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr" event={"ID":"e43d640d-4b47-42af-9a50-dd2916939d35","Type":"ContainerStarted","Data":"cd68051b62141d07d3def558ed56bdb095e62f05283cd7617f635d1576278db2"} Nov 21 14:19:53 crc kubenswrapper[4774]: I1121 14:19:53.241970 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr" event={"ID":"e43d640d-4b47-42af-9a50-dd2916939d35","Type":"ContainerStarted","Data":"235fb16fae309ec935f7e1e4ff3a41b88af7f45c759eaa05780d8936175b44ed"} Nov 21 14:19:53 crc kubenswrapper[4774]: I1121 14:19:53.263723 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-jzxwr" podStartSLOduration=2.5228188769999997 podStartE2EDuration="10.263697456s" podCreationTimestamp="2025-11-21 14:19:43 +0000 UTC" firstStartedPulling="2025-11-21 14:19:44.590273684 +0000 UTC m=+975.242472943" lastFinishedPulling="2025-11-21 14:19:52.331152253 +0000 UTC m=+982.983351522" observedRunningTime="2025-11-21 14:19:53.261539404 +0000 UTC m=+983.913738673" watchObservedRunningTime="2025-11-21 14:19:53.263697456 +0000 UTC m=+983.915896715" Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.418116 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-dn8mb"] Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.419148 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.421805 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.422164 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.424619 4774 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-8fznv" Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.433614 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-dn8mb"] Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.471696 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/da46cbb6-cd5f-4d50-ad76-23fc875189cb-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-dn8mb\" (UID: \"da46cbb6-cd5f-4d50-ad76-23fc875189cb\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.471790 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fb6b\" (UniqueName: \"kubernetes.io/projected/da46cbb6-cd5f-4d50-ad76-23fc875189cb-kube-api-access-8fb6b\") pod \"cert-manager-webhook-f4fb5df64-dn8mb\" (UID: \"da46cbb6-cd5f-4d50-ad76-23fc875189cb\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.573287 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/da46cbb6-cd5f-4d50-ad76-23fc875189cb-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-dn8mb\" (UID: \"da46cbb6-cd5f-4d50-ad76-23fc875189cb\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.573414 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fb6b\" (UniqueName: \"kubernetes.io/projected/da46cbb6-cd5f-4d50-ad76-23fc875189cb-kube-api-access-8fb6b\") pod \"cert-manager-webhook-f4fb5df64-dn8mb\" (UID: \"da46cbb6-cd5f-4d50-ad76-23fc875189cb\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.605541 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/da46cbb6-cd5f-4d50-ad76-23fc875189cb-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-dn8mb\" (UID: \"da46cbb6-cd5f-4d50-ad76-23fc875189cb\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.613953 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fb6b\" (UniqueName: \"kubernetes.io/projected/da46cbb6-cd5f-4d50-ad76-23fc875189cb-kube-api-access-8fb6b\") pod \"cert-manager-webhook-f4fb5df64-dn8mb\" (UID: \"da46cbb6-cd5f-4d50-ad76-23fc875189cb\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" Nov 21 14:19:56 crc kubenswrapper[4774]: I1121 14:19:56.739438 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" Nov 21 14:19:57 crc kubenswrapper[4774]: I1121 14:19:57.177978 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-dn8mb"] Nov 21 14:19:57 crc kubenswrapper[4774]: I1121 14:19:57.267262 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" event={"ID":"da46cbb6-cd5f-4d50-ad76-23fc875189cb","Type":"ContainerStarted","Data":"af17b6beb739a443046ce0d8d988edd01927ec7f3068b329a59597e2dde4164f"} Nov 21 14:19:58 crc kubenswrapper[4774]: I1121 14:19:58.570705 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8"] Nov 21 14:19:58 crc kubenswrapper[4774]: I1121 14:19:58.573148 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8" Nov 21 14:19:58 crc kubenswrapper[4774]: I1121 14:19:58.576571 4774 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-s29qb" Nov 21 14:19:58 crc kubenswrapper[4774]: I1121 14:19:58.583260 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8"] Nov 21 14:19:58 crc kubenswrapper[4774]: I1121 14:19:58.707622 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86914560-07d8-4113-90f0-e5549a06d52f-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-sg8g8\" (UID: \"86914560-07d8-4113-90f0-e5549a06d52f\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8" Nov 21 14:19:58 crc kubenswrapper[4774]: I1121 14:19:58.707722 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg448\" (UniqueName: \"kubernetes.io/projected/86914560-07d8-4113-90f0-e5549a06d52f-kube-api-access-cg448\") pod \"cert-manager-cainjector-855d9ccff4-sg8g8\" (UID: \"86914560-07d8-4113-90f0-e5549a06d52f\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8" Nov 21 14:19:58 crc kubenswrapper[4774]: I1121 14:19:58.809543 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86914560-07d8-4113-90f0-e5549a06d52f-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-sg8g8\" (UID: \"86914560-07d8-4113-90f0-e5549a06d52f\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8" Nov 21 14:19:58 crc kubenswrapper[4774]: I1121 14:19:58.809619 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg448\" (UniqueName: \"kubernetes.io/projected/86914560-07d8-4113-90f0-e5549a06d52f-kube-api-access-cg448\") pod \"cert-manager-cainjector-855d9ccff4-sg8g8\" (UID: \"86914560-07d8-4113-90f0-e5549a06d52f\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8" Nov 21 14:19:58 crc kubenswrapper[4774]: I1121 14:19:58.840189 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg448\" (UniqueName: \"kubernetes.io/projected/86914560-07d8-4113-90f0-e5549a06d52f-kube-api-access-cg448\") pod \"cert-manager-cainjector-855d9ccff4-sg8g8\" (UID: \"86914560-07d8-4113-90f0-e5549a06d52f\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8" Nov 21 14:19:58 crc kubenswrapper[4774]: I1121 14:19:58.841988 4774 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/86914560-07d8-4113-90f0-e5549a06d52f-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-sg8g8\" (UID: \"86914560-07d8-4113-90f0-e5549a06d52f\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8" Nov 21 14:19:58 crc kubenswrapper[4774]: I1121 14:19:58.903907 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8" Nov 21 14:19:59 crc kubenswrapper[4774]: I1121 14:19:59.398248 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8"] Nov 21 14:19:59 crc kubenswrapper[4774]: W1121 14:19:59.411236 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod86914560_07d8_4113_90f0_e5549a06d52f.slice/crio-d59dda0a294c7faa6f0a291b3c1bbe02abd27e56b7c2d966528c2869d2eaf76d WatchSource:0}: Error finding container d59dda0a294c7faa6f0a291b3c1bbe02abd27e56b7c2d966528c2869d2eaf76d: Status 404 returned error can't find the container with id d59dda0a294c7faa6f0a291b3c1bbe02abd27e56b7c2d966528c2869d2eaf76d Nov 21 14:20:00 crc kubenswrapper[4774]: I1121 14:20:00.293922 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8" event={"ID":"86914560-07d8-4113-90f0-e5549a06d52f","Type":"ContainerStarted","Data":"d59dda0a294c7faa6f0a291b3c1bbe02abd27e56b7c2d966528c2869d2eaf76d"} Nov 21 14:20:06 crc kubenswrapper[4774]: I1121 14:20:06.347933 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8" event={"ID":"86914560-07d8-4113-90f0-e5549a06d52f","Type":"ContainerStarted","Data":"3757cb83bc8d84da2557a5600d0d9f2974ed7c3c200754dccf927ba06d9412de"} Nov 21 14:20:06 crc kubenswrapper[4774]: I1121 14:20:06.353241 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" event={"ID":"da46cbb6-cd5f-4d50-ad76-23fc875189cb","Type":"ContainerStarted","Data":"75a1ab36ac71b46c8d4adb9175c9a9e7d682c29e93767ab4cdbef992212d50e6"} Nov 21 14:20:06 crc kubenswrapper[4774]: I1121 14:20:06.353420 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" Nov 21 14:20:06 crc kubenswrapper[4774]: I1121 14:20:06.375079 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" podStartSLOduration=2.143764284 podStartE2EDuration="10.375057647s" podCreationTimestamp="2025-11-21 14:19:56 +0000 UTC" firstStartedPulling="2025-11-21 14:19:57.190613974 +0000 UTC m=+987.842813233" lastFinishedPulling="2025-11-21 14:20:05.421907337 +0000 UTC m=+996.074106596" observedRunningTime="2025-11-21 14:20:06.36918642 +0000 UTC m=+997.021385699" watchObservedRunningTime="2025-11-21 14:20:06.375057647 +0000 UTC m=+997.027256906" Nov 21 14:20:07 crc kubenswrapper[4774]: I1121 14:20:07.377595 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-sg8g8" podStartSLOduration=3.392540769 podStartE2EDuration="9.377573274s" podCreationTimestamp="2025-11-21 14:19:58 +0000 UTC" firstStartedPulling="2025-11-21 14:19:59.417189681 +0000 UTC m=+990.069388940" lastFinishedPulling="2025-11-21 14:20:05.402222186 +0000 UTC m=+996.054421445" 
observedRunningTime="2025-11-21 14:20:07.37637604 +0000 UTC m=+998.028575299" watchObservedRunningTime="2025-11-21 14:20:07.377573274 +0000 UTC m=+998.029772533" Nov 21 14:20:11 crc kubenswrapper[4774]: I1121 14:20:11.743110 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-dn8mb" Nov 21 14:20:15 crc kubenswrapper[4774]: I1121 14:20:15.314516 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-b57xc"] Nov 21 14:20:15 crc kubenswrapper[4774]: I1121 14:20:15.315719 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-b57xc" Nov 21 14:20:15 crc kubenswrapper[4774]: I1121 14:20:15.318121 4774 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-4hgjm" Nov 21 14:20:15 crc kubenswrapper[4774]: I1121 14:20:15.328017 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-b57xc"] Nov 21 14:20:15 crc kubenswrapper[4774]: I1121 14:20:15.381927 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e1ea63d6-75ce-4b59-83c9-9f63a0d6f740-bound-sa-token\") pod \"cert-manager-86cb77c54b-b57xc\" (UID: \"e1ea63d6-75ce-4b59-83c9-9f63a0d6f740\") " pod="cert-manager/cert-manager-86cb77c54b-b57xc" Nov 21 14:20:15 crc kubenswrapper[4774]: I1121 14:20:15.382595 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxsn2\" (UniqueName: \"kubernetes.io/projected/e1ea63d6-75ce-4b59-83c9-9f63a0d6f740-kube-api-access-kxsn2\") pod \"cert-manager-86cb77c54b-b57xc\" (UID: \"e1ea63d6-75ce-4b59-83c9-9f63a0d6f740\") " pod="cert-manager/cert-manager-86cb77c54b-b57xc" Nov 21 14:20:15 crc kubenswrapper[4774]: I1121 14:20:15.483795 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxsn2\" (UniqueName: \"kubernetes.io/projected/e1ea63d6-75ce-4b59-83c9-9f63a0d6f740-kube-api-access-kxsn2\") pod \"cert-manager-86cb77c54b-b57xc\" (UID: \"e1ea63d6-75ce-4b59-83c9-9f63a0d6f740\") " pod="cert-manager/cert-manager-86cb77c54b-b57xc" Nov 21 14:20:15 crc kubenswrapper[4774]: I1121 14:20:15.483900 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e1ea63d6-75ce-4b59-83c9-9f63a0d6f740-bound-sa-token\") pod \"cert-manager-86cb77c54b-b57xc\" (UID: \"e1ea63d6-75ce-4b59-83c9-9f63a0d6f740\") " pod="cert-manager/cert-manager-86cb77c54b-b57xc" Nov 21 14:20:15 crc kubenswrapper[4774]: I1121 14:20:15.510249 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e1ea63d6-75ce-4b59-83c9-9f63a0d6f740-bound-sa-token\") pod \"cert-manager-86cb77c54b-b57xc\" (UID: \"e1ea63d6-75ce-4b59-83c9-9f63a0d6f740\") " pod="cert-manager/cert-manager-86cb77c54b-b57xc" Nov 21 14:20:15 crc kubenswrapper[4774]: I1121 14:20:15.514813 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxsn2\" (UniqueName: \"kubernetes.io/projected/e1ea63d6-75ce-4b59-83c9-9f63a0d6f740-kube-api-access-kxsn2\") pod \"cert-manager-86cb77c54b-b57xc\" (UID: \"e1ea63d6-75ce-4b59-83c9-9f63a0d6f740\") " pod="cert-manager/cert-manager-86cb77c54b-b57xc" Nov 21 14:20:15 crc kubenswrapper[4774]: I1121 14:20:15.638991 4774 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-b57xc" Nov 21 14:20:16 crc kubenswrapper[4774]: I1121 14:20:16.068346 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-b57xc"] Nov 21 14:20:16 crc kubenswrapper[4774]: I1121 14:20:16.421400 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-b57xc" event={"ID":"e1ea63d6-75ce-4b59-83c9-9f63a0d6f740","Type":"ContainerStarted","Data":"d6c43b6030a0f2af20ddb2050a22bdfebee7e7a32c002ecff89863128cd86c2c"} Nov 21 14:20:16 crc kubenswrapper[4774]: I1121 14:20:16.421460 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-b57xc" event={"ID":"e1ea63d6-75ce-4b59-83c9-9f63a0d6f740","Type":"ContainerStarted","Data":"419db88625022e3ded2ea637b75df62bbbccb39541e5507ee5472ffd8f41788e"} Nov 21 14:20:16 crc kubenswrapper[4774]: I1121 14:20:16.443899 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-b57xc" podStartSLOduration=1.4438710719999999 podStartE2EDuration="1.443871072s" podCreationTimestamp="2025-11-21 14:20:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:20:16.4431016 +0000 UTC m=+1007.095300859" watchObservedRunningTime="2025-11-21 14:20:16.443871072 +0000 UTC m=+1007.096070331" Nov 21 14:20:24 crc kubenswrapper[4774]: I1121 14:20:24.889805 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-z4r8s"] Nov 21 14:20:24 crc kubenswrapper[4774]: I1121 14:20:24.891782 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-z4r8s" Nov 21 14:20:24 crc kubenswrapper[4774]: I1121 14:20:24.894937 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-l2bcc" Nov 21 14:20:24 crc kubenswrapper[4774]: I1121 14:20:24.895259 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Nov 21 14:20:24 crc kubenswrapper[4774]: I1121 14:20:24.895806 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Nov 21 14:20:24 crc kubenswrapper[4774]: I1121 14:20:24.926276 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-z4r8s"] Nov 21 14:20:24 crc kubenswrapper[4774]: I1121 14:20:24.976302 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcnkd\" (UniqueName: \"kubernetes.io/projected/4c506f77-b649-4c68-8bf7-556c764f3cce-kube-api-access-fcnkd\") pod \"openstack-operator-index-z4r8s\" (UID: \"4c506f77-b649-4c68-8bf7-556c764f3cce\") " pod="openstack-operators/openstack-operator-index-z4r8s" Nov 21 14:20:25 crc kubenswrapper[4774]: I1121 14:20:25.077598 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcnkd\" (UniqueName: \"kubernetes.io/projected/4c506f77-b649-4c68-8bf7-556c764f3cce-kube-api-access-fcnkd\") pod \"openstack-operator-index-z4r8s\" (UID: \"4c506f77-b649-4c68-8bf7-556c764f3cce\") " pod="openstack-operators/openstack-operator-index-z4r8s" Nov 21 14:20:25 crc kubenswrapper[4774]: I1121 14:20:25.098640 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcnkd\" (UniqueName: \"kubernetes.io/projected/4c506f77-b649-4c68-8bf7-556c764f3cce-kube-api-access-fcnkd\") pod \"openstack-operator-index-z4r8s\" (UID: \"4c506f77-b649-4c68-8bf7-556c764f3cce\") " pod="openstack-operators/openstack-operator-index-z4r8s" Nov 21 14:20:25 crc kubenswrapper[4774]: I1121 14:20:25.224110 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-z4r8s"
Nov 21 14:20:25 crc kubenswrapper[4774]: I1121 14:20:25.661530 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-z4r8s"]
Nov 21 14:20:25 crc kubenswrapper[4774]: W1121 14:20:25.668409 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4c506f77_b649_4c68_8bf7_556c764f3cce.slice/crio-11c512a30a276f0ac712a754d2ec2be8708b6131929881bdfcd64b838747f780 WatchSource:0}: Error finding container 11c512a30a276f0ac712a754d2ec2be8708b6131929881bdfcd64b838747f780: Status 404 returned error can't find the container with id 11c512a30a276f0ac712a754d2ec2be8708b6131929881bdfcd64b838747f780
Nov 21 14:20:26 crc kubenswrapper[4774]: I1121 14:20:26.488749 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-z4r8s" event={"ID":"4c506f77-b649-4c68-8bf7-556c764f3cce","Type":"ContainerStarted","Data":"11c512a30a276f0ac712a754d2ec2be8708b6131929881bdfcd64b838747f780"}
Nov 21 14:20:27 crc kubenswrapper[4774]: I1121 14:20:27.499656 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-z4r8s" event={"ID":"4c506f77-b649-4c68-8bf7-556c764f3cce","Type":"ContainerStarted","Data":"b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a"}
Nov 21 14:20:27 crc kubenswrapper[4774]: I1121 14:20:27.526017 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-z4r8s" podStartSLOduration=2.595132386 podStartE2EDuration="3.52599134s" podCreationTimestamp="2025-11-21 14:20:24 +0000 UTC" firstStartedPulling="2025-11-21 14:20:25.671118896 +0000 UTC m=+1016.323318145" lastFinishedPulling="2025-11-21 14:20:26.60197784 +0000 UTC m=+1017.254177099" observedRunningTime="2025-11-21 14:20:27.520899715 +0000 UTC m=+1018.173098984" watchObservedRunningTime="2025-11-21 14:20:27.52599134 +0000 UTC m=+1018.178190599"
Nov 21 14:20:27 crc kubenswrapper[4774]: I1121 14:20:27.654644 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-z4r8s"]
Nov 21 14:20:28 crc kubenswrapper[4774]: I1121 14:20:28.260793 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-fqdjt"]
Nov 21 14:20:28 crc kubenswrapper[4774]: I1121 14:20:28.262412 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-fqdjt"
Nov 21 14:20:28 crc kubenswrapper[4774]: I1121 14:20:28.275109 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-fqdjt"]
Nov 21 14:20:28 crc kubenswrapper[4774]: I1121 14:20:28.327694 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md8gt\" (UniqueName: \"kubernetes.io/projected/8f966582-0f84-41c5-ad5a-b96988e4368e-kube-api-access-md8gt\") pod \"openstack-operator-index-fqdjt\" (UID: \"8f966582-0f84-41c5-ad5a-b96988e4368e\") " pod="openstack-operators/openstack-operator-index-fqdjt"
Nov 21 14:20:28 crc kubenswrapper[4774]: I1121 14:20:28.429996 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md8gt\" (UniqueName: \"kubernetes.io/projected/8f966582-0f84-41c5-ad5a-b96988e4368e-kube-api-access-md8gt\") pod \"openstack-operator-index-fqdjt\" (UID: \"8f966582-0f84-41c5-ad5a-b96988e4368e\") " pod="openstack-operators/openstack-operator-index-fqdjt"
Nov 21 14:20:28 crc kubenswrapper[4774]: I1121 14:20:28.453877 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md8gt\" (UniqueName: \"kubernetes.io/projected/8f966582-0f84-41c5-ad5a-b96988e4368e-kube-api-access-md8gt\") pod \"openstack-operator-index-fqdjt\" (UID: \"8f966582-0f84-41c5-ad5a-b96988e4368e\") " pod="openstack-operators/openstack-operator-index-fqdjt"
Nov 21 14:20:28 crc kubenswrapper[4774]: I1121 14:20:28.581316 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-fqdjt"
Nov 21 14:20:28 crc kubenswrapper[4774]: I1121 14:20:28.868104 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-fqdjt"]
Nov 21 14:20:29 crc kubenswrapper[4774]: I1121 14:20:29.517330 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fqdjt" event={"ID":"8f966582-0f84-41c5-ad5a-b96988e4368e","Type":"ContainerStarted","Data":"5ad683f7aa2a54e3de00ca40c6c085d05ef74d61ef178201c2b39ce77c1cb3d6"}
Nov 21 14:20:29 crc kubenswrapper[4774]: I1121 14:20:29.517742 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-fqdjt" event={"ID":"8f966582-0f84-41c5-ad5a-b96988e4368e","Type":"ContainerStarted","Data":"e527cd0acc5d51b12d796b3c8d0150cb926c906e56d02be3c9f81e2066a8b656"}
Nov 21 14:20:29 crc kubenswrapper[4774]: I1121 14:20:29.517480 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-z4r8s" podUID="4c506f77-b649-4c68-8bf7-556c764f3cce" containerName="registry-server" containerID="cri-o://b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a" gracePeriod=2
Nov 21 14:20:29 crc kubenswrapper[4774]: I1121 14:20:29.543865 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-fqdjt" podStartSLOduration=1.10798551 podStartE2EDuration="1.543807739s" podCreationTimestamp="2025-11-21 14:20:28 +0000 UTC" firstStartedPulling="2025-11-21 14:20:28.878510531 +0000 UTC m=+1019.530709790" lastFinishedPulling="2025-11-21 14:20:29.31433276 +0000 UTC m=+1019.966532019" observedRunningTime="2025-11-21 14:20:29.540104073 +0000 UTC m=+1020.192303332" watchObservedRunningTime="2025-11-21 14:20:29.543807739 +0000 UTC m=+1020.196006998"
Nov 21 14:20:29 crc kubenswrapper[4774]: I1121 14:20:29.932389 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-z4r8s"
Nov 21 14:20:30 crc kubenswrapper[4774]: I1121 14:20:30.054876 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcnkd\" (UniqueName: \"kubernetes.io/projected/4c506f77-b649-4c68-8bf7-556c764f3cce-kube-api-access-fcnkd\") pod \"4c506f77-b649-4c68-8bf7-556c764f3cce\" (UID: \"4c506f77-b649-4c68-8bf7-556c764f3cce\") "
Nov 21 14:20:30 crc kubenswrapper[4774]: I1121 14:20:30.061450 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c506f77-b649-4c68-8bf7-556c764f3cce-kube-api-access-fcnkd" (OuterVolumeSpecName: "kube-api-access-fcnkd") pod "4c506f77-b649-4c68-8bf7-556c764f3cce" (UID: "4c506f77-b649-4c68-8bf7-556c764f3cce"). InnerVolumeSpecName "kube-api-access-fcnkd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:20:30 crc kubenswrapper[4774]: I1121 14:20:30.158660 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcnkd\" (UniqueName: \"kubernetes.io/projected/4c506f77-b649-4c68-8bf7-556c764f3cce-kube-api-access-fcnkd\") on node \"crc\" DevicePath \"\""
Nov 21 14:20:30 crc kubenswrapper[4774]: I1121 14:20:30.525184 4774 generic.go:334] "Generic (PLEG): container finished" podID="4c506f77-b649-4c68-8bf7-556c764f3cce" containerID="b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a" exitCode=0
Nov 21 14:20:30 crc kubenswrapper[4774]: I1121 14:20:30.525335 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-z4r8s"
Nov 21 14:20:30 crc kubenswrapper[4774]: I1121 14:20:30.525412 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-z4r8s" event={"ID":"4c506f77-b649-4c68-8bf7-556c764f3cce","Type":"ContainerDied","Data":"b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a"}
Nov 21 14:20:30 crc kubenswrapper[4774]: I1121 14:20:30.526892 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-z4r8s" event={"ID":"4c506f77-b649-4c68-8bf7-556c764f3cce","Type":"ContainerDied","Data":"11c512a30a276f0ac712a754d2ec2be8708b6131929881bdfcd64b838747f780"}
Nov 21 14:20:30 crc kubenswrapper[4774]: I1121 14:20:30.526927 4774 scope.go:117] "RemoveContainer" containerID="b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a"
Nov 21 14:20:30 crc kubenswrapper[4774]: I1121 14:20:30.553771 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-z4r8s"]
Nov 21 14:20:30 crc kubenswrapper[4774]: I1121 14:20:30.553884 4774 scope.go:117] "RemoveContainer" containerID="b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a"
Nov 21 14:20:30 crc kubenswrapper[4774]: E1121 14:20:30.554330 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a\": container with ID starting with b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a not found: ID does not exist" containerID="b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a"
Nov 21 14:20:30 crc kubenswrapper[4774]: I1121 14:20:30.554395 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a"} err="failed to get container status \"b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a\": rpc error: code = NotFound desc = could not find container \"b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a\": container with ID starting with b1a1c88b3a0f956ddbe51b143885c320beb0188947effaf51bd64b00fdef9d4a not found: ID does not exist"
Nov 21 14:20:30 crc kubenswrapper[4774]: I1121 14:20:30.558026 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-z4r8s"]
Nov 21 14:20:32 crc kubenswrapper[4774]: I1121 14:20:32.102263 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c506f77-b649-4c68-8bf7-556c764f3cce" path="/var/lib/kubelet/pods/4c506f77-b649-4c68-8bf7-556c764f3cce/volumes"
Nov 21 14:20:38 crc kubenswrapper[4774]: I1121 14:20:38.581700 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-fqdjt"
Nov 21 14:20:38 crc kubenswrapper[4774]: I1121 14:20:38.582540 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-fqdjt"
Nov 21 14:20:38 crc kubenswrapper[4774]: I1121 14:20:38.614478 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-fqdjt"
Nov 21 14:20:38 crc kubenswrapper[4774]: I1121 14:20:38.643808 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-fqdjt"
Nov 21 14:20:46 crc kubenswrapper[4774]: I1121 14:20:46.905319 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"]
Nov 21 14:20:46 crc kubenswrapper[4774]: E1121 14:20:46.906758 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c506f77-b649-4c68-8bf7-556c764f3cce" containerName="registry-server"
Nov 21 14:20:46 crc kubenswrapper[4774]: I1121 14:20:46.906776 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c506f77-b649-4c68-8bf7-556c764f3cce" containerName="registry-server"
Nov 21 14:20:46 crc kubenswrapper[4774]: I1121 14:20:46.907136 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c506f77-b649-4c68-8bf7-556c764f3cce" containerName="registry-server"
Nov 21 14:20:46 crc kubenswrapper[4774]: I1121 14:20:46.910772 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:46 crc kubenswrapper[4774]: I1121 14:20:46.916214 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-mvcwc"
Nov 21 14:20:46 crc kubenswrapper[4774]: I1121 14:20:46.925059 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"]
Nov 21 14:20:47 crc kubenswrapper[4774]: I1121 14:20:47.021017 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/841c2b76-6113-4cc9-a146-67a723c67ad4-util\") pod \"2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f\" (UID: \"841c2b76-6113-4cc9-a146-67a723c67ad4\") " pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:47 crc kubenswrapper[4774]: I1121 14:20:47.021247 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/841c2b76-6113-4cc9-a146-67a723c67ad4-bundle\") pod \"2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f\" (UID: \"841c2b76-6113-4cc9-a146-67a723c67ad4\") " pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:47 crc kubenswrapper[4774]: I1121 14:20:47.021480 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd2sz\" (UniqueName: \"kubernetes.io/projected/841c2b76-6113-4cc9-a146-67a723c67ad4-kube-api-access-jd2sz\") pod \"2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f\" (UID: \"841c2b76-6113-4cc9-a146-67a723c67ad4\") " pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:47 crc kubenswrapper[4774]: I1121 14:20:47.123338 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/841c2b76-6113-4cc9-a146-67a723c67ad4-util\") pod \"2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f\" (UID: \"841c2b76-6113-4cc9-a146-67a723c67ad4\") " pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:47 crc kubenswrapper[4774]: I1121 14:20:47.123457 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/841c2b76-6113-4cc9-a146-67a723c67ad4-bundle\") pod \"2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f\" (UID: \"841c2b76-6113-4cc9-a146-67a723c67ad4\") " pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:47 crc kubenswrapper[4774]: I1121 14:20:47.123496 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd2sz\" (UniqueName: \"kubernetes.io/projected/841c2b76-6113-4cc9-a146-67a723c67ad4-kube-api-access-jd2sz\") pod \"2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f\" (UID: \"841c2b76-6113-4cc9-a146-67a723c67ad4\") " pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:47 crc kubenswrapper[4774]: I1121 14:20:47.124190 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/841c2b76-6113-4cc9-a146-67a723c67ad4-util\") pod \"2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f\" (UID: \"841c2b76-6113-4cc9-a146-67a723c67ad4\") " pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:47 crc kubenswrapper[4774]: I1121 14:20:47.124368 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/841c2b76-6113-4cc9-a146-67a723c67ad4-bundle\") pod \"2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f\" (UID: \"841c2b76-6113-4cc9-a146-67a723c67ad4\") " pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:47 crc kubenswrapper[4774]: I1121 14:20:47.147080 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd2sz\" (UniqueName: \"kubernetes.io/projected/841c2b76-6113-4cc9-a146-67a723c67ad4-kube-api-access-jd2sz\") pod \"2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f\" (UID: \"841c2b76-6113-4cc9-a146-67a723c67ad4\") " pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:47 crc kubenswrapper[4774]: I1121 14:20:47.241782 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:47 crc kubenswrapper[4774]: I1121 14:20:47.461299 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"]
Nov 21 14:20:47 crc kubenswrapper[4774]: I1121 14:20:47.656619 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f" event={"ID":"841c2b76-6113-4cc9-a146-67a723c67ad4","Type":"ContainerStarted","Data":"da4cf0c5f8cf29ccf65aac961d1f1547b617ccaa8df7263eb9ef407d15548908"}
Nov 21 14:20:48 crc kubenswrapper[4774]: I1121 14:20:48.665404 4774 generic.go:334] "Generic (PLEG): container finished" podID="841c2b76-6113-4cc9-a146-67a723c67ad4" containerID="2056f1aeb532a3d7d7b0e603238cdac405c95458a37b1626f431bb55f3fecbd1" exitCode=0
Nov 21 14:20:48 crc kubenswrapper[4774]: I1121 14:20:48.665473 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f" event={"ID":"841c2b76-6113-4cc9-a146-67a723c67ad4","Type":"ContainerDied","Data":"2056f1aeb532a3d7d7b0e603238cdac405c95458a37b1626f431bb55f3fecbd1"}
Nov 21 14:20:50 crc kubenswrapper[4774]: I1121 14:20:50.690710 4774 generic.go:334] "Generic (PLEG): container finished" podID="841c2b76-6113-4cc9-a146-67a723c67ad4" containerID="61e2aea91c7f798a352efec7b9a0aa47fa8fe7db05895b2773403f9ed889ea45" exitCode=0
Nov 21 14:20:50 crc kubenswrapper[4774]: I1121 14:20:50.690847 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f" event={"ID":"841c2b76-6113-4cc9-a146-67a723c67ad4","Type":"ContainerDied","Data":"61e2aea91c7f798a352efec7b9a0aa47fa8fe7db05895b2773403f9ed889ea45"}
Nov 21 14:20:51 crc kubenswrapper[4774]: I1121 14:20:51.703284 4774 generic.go:334] "Generic (PLEG): container finished" podID="841c2b76-6113-4cc9-a146-67a723c67ad4" containerID="41333e531fd208d5fcb41e54b38c776115eed2ad18bc31d993009fb977cd9caf" exitCode=0
Nov 21 14:20:51 crc kubenswrapper[4774]: I1121 14:20:51.703369 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f" event={"ID":"841c2b76-6113-4cc9-a146-67a723c67ad4","Type":"ContainerDied","Data":"41333e531fd208d5fcb41e54b38c776115eed2ad18bc31d993009fb977cd9caf"}
Nov 21 14:20:52 crc kubenswrapper[4774]: I1121 14:20:52.946233 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:53 crc kubenswrapper[4774]: I1121 14:20:53.018733 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jd2sz\" (UniqueName: \"kubernetes.io/projected/841c2b76-6113-4cc9-a146-67a723c67ad4-kube-api-access-jd2sz\") pod \"841c2b76-6113-4cc9-a146-67a723c67ad4\" (UID: \"841c2b76-6113-4cc9-a146-67a723c67ad4\") "
Nov 21 14:20:53 crc kubenswrapper[4774]: I1121 14:20:53.018923 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/841c2b76-6113-4cc9-a146-67a723c67ad4-bundle\") pod \"841c2b76-6113-4cc9-a146-67a723c67ad4\" (UID: \"841c2b76-6113-4cc9-a146-67a723c67ad4\") "
Nov 21 14:20:53 crc kubenswrapper[4774]: I1121 14:20:53.018978 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/841c2b76-6113-4cc9-a146-67a723c67ad4-util\") pod \"841c2b76-6113-4cc9-a146-67a723c67ad4\" (UID: \"841c2b76-6113-4cc9-a146-67a723c67ad4\") "
Nov 21 14:20:53 crc kubenswrapper[4774]: I1121 14:20:53.020146 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/841c2b76-6113-4cc9-a146-67a723c67ad4-bundle" (OuterVolumeSpecName: "bundle") pod "841c2b76-6113-4cc9-a146-67a723c67ad4" (UID: "841c2b76-6113-4cc9-a146-67a723c67ad4"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:20:53 crc kubenswrapper[4774]: I1121 14:20:53.027377 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/841c2b76-6113-4cc9-a146-67a723c67ad4-kube-api-access-jd2sz" (OuterVolumeSpecName: "kube-api-access-jd2sz") pod "841c2b76-6113-4cc9-a146-67a723c67ad4" (UID: "841c2b76-6113-4cc9-a146-67a723c67ad4"). InnerVolumeSpecName "kube-api-access-jd2sz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:20:53 crc kubenswrapper[4774]: I1121 14:20:53.037368 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/841c2b76-6113-4cc9-a146-67a723c67ad4-util" (OuterVolumeSpecName: "util") pod "841c2b76-6113-4cc9-a146-67a723c67ad4" (UID: "841c2b76-6113-4cc9-a146-67a723c67ad4"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:20:53 crc kubenswrapper[4774]: I1121 14:20:53.120934 4774 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/841c2b76-6113-4cc9-a146-67a723c67ad4-util\") on node \"crc\" DevicePath \"\""
Nov 21 14:20:53 crc kubenswrapper[4774]: I1121 14:20:53.120984 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jd2sz\" (UniqueName: \"kubernetes.io/projected/841c2b76-6113-4cc9-a146-67a723c67ad4-kube-api-access-jd2sz\") on node \"crc\" DevicePath \"\""
Nov 21 14:20:53 crc kubenswrapper[4774]: I1121 14:20:53.121000 4774 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/841c2b76-6113-4cc9-a146-67a723c67ad4-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:20:53 crc kubenswrapper[4774]: I1121 14:20:53.719083 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f" event={"ID":"841c2b76-6113-4cc9-a146-67a723c67ad4","Type":"ContainerDied","Data":"da4cf0c5f8cf29ccf65aac961d1f1547b617ccaa8df7263eb9ef407d15548908"}
Nov 21 14:20:53 crc kubenswrapper[4774]: I1121 14:20:53.719145 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da4cf0c5f8cf29ccf65aac961d1f1547b617ccaa8df7263eb9ef407d15548908"
Nov 21 14:20:53 crc kubenswrapper[4774]: I1121 14:20:53.719152 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f"
Nov 21 14:20:56 crc kubenswrapper[4774]: I1121 14:20:56.727026 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg"]
Nov 21 14:20:56 crc kubenswrapper[4774]: E1121 14:20:56.727762 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="841c2b76-6113-4cc9-a146-67a723c67ad4" containerName="util"
Nov 21 14:20:56 crc kubenswrapper[4774]: I1121 14:20:56.727777 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="841c2b76-6113-4cc9-a146-67a723c67ad4" containerName="util"
Nov 21 14:20:56 crc kubenswrapper[4774]: E1121 14:20:56.727787 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="841c2b76-6113-4cc9-a146-67a723c67ad4" containerName="extract"
Nov 21 14:20:56 crc kubenswrapper[4774]: I1121 14:20:56.727793 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="841c2b76-6113-4cc9-a146-67a723c67ad4" containerName="extract"
Nov 21 14:20:56 crc kubenswrapper[4774]: E1121 14:20:56.727812 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="841c2b76-6113-4cc9-a146-67a723c67ad4" containerName="pull"
Nov 21 14:20:56 crc kubenswrapper[4774]: I1121 14:20:56.727897 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="841c2b76-6113-4cc9-a146-67a723c67ad4" containerName="pull"
Nov 21 14:20:56 crc kubenswrapper[4774]: I1121 14:20:56.728090 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="841c2b76-6113-4cc9-a146-67a723c67ad4" containerName="extract"
Nov 21 14:20:56 crc kubenswrapper[4774]: I1121 14:20:56.729033 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg"
Nov 21 14:20:56 crc kubenswrapper[4774]: I1121 14:20:56.732189 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-6d6tb"
Nov 21 14:20:56 crc kubenswrapper[4774]: I1121 14:20:56.793169 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg"]
Nov 21 14:20:56 crc kubenswrapper[4774]: I1121 14:20:56.881208 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctfkj\" (UniqueName: \"kubernetes.io/projected/ba0840f2-15c3-48ed-bd53-7057786f734a-kube-api-access-ctfkj\") pod \"openstack-operator-controller-operator-77c7f689f5-f5rhg\" (UID: \"ba0840f2-15c3-48ed-bd53-7057786f734a\") " pod="openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg"
Nov 21 14:20:56 crc kubenswrapper[4774]: I1121 14:20:56.982732 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctfkj\" (UniqueName: \"kubernetes.io/projected/ba0840f2-15c3-48ed-bd53-7057786f734a-kube-api-access-ctfkj\") pod \"openstack-operator-controller-operator-77c7f689f5-f5rhg\" (UID: \"ba0840f2-15c3-48ed-bd53-7057786f734a\") " pod="openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg"
Nov 21 14:20:57 crc kubenswrapper[4774]: I1121 14:20:57.006224 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctfkj\" (UniqueName: \"kubernetes.io/projected/ba0840f2-15c3-48ed-bd53-7057786f734a-kube-api-access-ctfkj\") pod \"openstack-operator-controller-operator-77c7f689f5-f5rhg\" (UID: \"ba0840f2-15c3-48ed-bd53-7057786f734a\") " pod="openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg"
Nov 21 14:20:57 crc kubenswrapper[4774]: I1121 14:20:57.050952 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg"
Nov 21 14:20:57 crc kubenswrapper[4774]: I1121 14:20:57.342609 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg"]
Nov 21 14:20:57 crc kubenswrapper[4774]: I1121 14:20:57.746278 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg" event={"ID":"ba0840f2-15c3-48ed-bd53-7057786f734a","Type":"ContainerStarted","Data":"72ff26da8d5e88e2cfff64741d040368c47b603aabe02a750b600b0729eca0f6"}
Nov 21 14:21:02 crc kubenswrapper[4774]: I1121 14:21:02.803211 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg" event={"ID":"ba0840f2-15c3-48ed-bd53-7057786f734a","Type":"ContainerStarted","Data":"2b8ae2098d4f0e76f1797b2c1b64498e2c710956aa5e2addaf6f55101dc67de4"}
Nov 21 14:21:05 crc kubenswrapper[4774]: I1121 14:21:05.854111 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg" event={"ID":"ba0840f2-15c3-48ed-bd53-7057786f734a","Type":"ContainerStarted","Data":"8b85802099999a21392b70ea293b8b244fa5a6908e0bdfb41a4615fb69628368"}
Nov 21 14:21:05 crc kubenswrapper[4774]: I1121 14:21:05.854614 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg"
Nov 21 14:21:05 crc kubenswrapper[4774]: I1121 14:21:05.886553 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg" podStartSLOduration=2.602679718 podStartE2EDuration="9.886536674s" podCreationTimestamp="2025-11-21 14:20:56 +0000 UTC" firstStartedPulling="2025-11-21 14:20:57.359545835 +0000 UTC m=+1048.011745094" lastFinishedPulling="2025-11-21 14:21:04.643402791 +0000 UTC m=+1055.295602050" observedRunningTime="2025-11-21 14:21:05.884713262 +0000 UTC m=+1056.536912521" watchObservedRunningTime="2025-11-21 14:21:05.886536674 +0000 UTC m=+1056.538735933"
Nov 21 14:21:07 crc kubenswrapper[4774]: I1121 14:21:07.055178 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-77c7f689f5-f5rhg"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.640983 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.643193 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.646661 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-cbl82"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.671251 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.689252 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.690970 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.698959 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.699261 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-xrvxc"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.700571 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.702915 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.706585 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-rqq5h"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.707705 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.717879 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.728300 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.734049 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-xxkjv"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.755289 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-799fn\" (UniqueName: \"kubernetes.io/projected/c28fdb9b-2f84-41f0-ae41-977dca177484-kube-api-access-799fn\") pod \"cinder-operator-controller-manager-6d8fd67bf7-vfpjr\" (UID: \"c28fdb9b-2f84-41f0-ae41-977dca177484\") " pod="openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.755428 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tcpv\" (UniqueName: \"kubernetes.io/projected/f50b16b0-3430-4378-a32c-8d09f402108e-kube-api-access-8tcpv\") pod \"barbican-operator-controller-manager-7768f8c84f-jf5xt\" (UID: \"f50b16b0-3430-4378-a32c-8d09f402108e\") " pod="openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.775230 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpmht\" (UniqueName: \"kubernetes.io/projected/cf13e842-38d2-409c-87f8-3163868965d8-kube-api-access-qpmht\") pod \"designate-operator-controller-manager-56dfb6b67f-qs9lx\" (UID: \"cf13e842-38d2-409c-87f8-3163868965d8\") " pod="openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.845979 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.859464 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.863543 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.866230 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-vdnnf"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.877478 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-799fn\" (UniqueName: \"kubernetes.io/projected/c28fdb9b-2f84-41f0-ae41-977dca177484-kube-api-access-799fn\") pod \"cinder-operator-controller-manager-6d8fd67bf7-vfpjr\" (UID: \"c28fdb9b-2f84-41f0-ae41-977dca177484\") " pod="openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.877574 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tcpv\" (UniqueName: \"kubernetes.io/projected/f50b16b0-3430-4378-a32c-8d09f402108e-kube-api-access-8tcpv\") pod \"barbican-operator-controller-manager-7768f8c84f-jf5xt\" (UID: \"f50b16b0-3430-4378-a32c-8d09f402108e\") " pod="openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.877659 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpmht\" (UniqueName: \"kubernetes.io/projected/cf13e842-38d2-409c-87f8-3163868965d8-kube-api-access-qpmht\") pod \"designate-operator-controller-manager-56dfb6b67f-qs9lx\" (UID: \"cf13e842-38d2-409c-87f8-3163868965d8\") " pod="openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.877757 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qdb4\" (UniqueName: \"kubernetes.io/projected/c2a3d34a-eca8-4106-8a2b-47254b1af44b-kube-api-access-7qdb4\") pod \"glance-operator-controller-manager-8667fbf6f6-mx2n9\" (UID: \"c2a3d34a-eca8-4106-8a2b-47254b1af44b\") " pod="openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.888480 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.909470 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.917441 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-f4bj8"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.923447 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.932531 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-799fn\" (UniqueName: \"kubernetes.io/projected/c28fdb9b-2f84-41f0-ae41-977dca177484-kube-api-access-799fn\") pod \"cinder-operator-controller-manager-6d8fd67bf7-vfpjr\" (UID: \"c28fdb9b-2f84-41f0-ae41-977dca177484\") " pod="openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.933867 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.936098 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tcpv\" (UniqueName: \"kubernetes.io/projected/f50b16b0-3430-4378-a32c-8d09f402108e-kube-api-access-8tcpv\") pod \"barbican-operator-controller-manager-7768f8c84f-jf5xt\" (UID: \"f50b16b0-3430-4378-a32c-8d09f402108e\") " pod="openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.936597 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpmht\" (UniqueName: \"kubernetes.io/projected/cf13e842-38d2-409c-87f8-3163868965d8-kube-api-access-qpmht\") pod \"designate-operator-controller-manager-56dfb6b67f-qs9lx\" (UID: \"cf13e842-38d2-409c-87f8-3163868965d8\") " pod="openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.940496 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.948497 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.948271 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.958939 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx"]
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.961025 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.967178 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.973495 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.973850 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-4mdxd"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.973989 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-psprz"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.979606 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qdb4\" (UniqueName: \"kubernetes.io/projected/c2a3d34a-eca8-4106-8a2b-47254b1af44b-kube-api-access-7qdb4\") pod \"glance-operator-controller-manager-8667fbf6f6-mx2n9\" (UID: \"c2a3d34a-eca8-4106-8a2b-47254b1af44b\") " pod="openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.979684 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xh7cj\" (UniqueName: \"kubernetes.io/projected/496286dc-00cb-42ae-914e-4d8769847726-kube-api-access-xh7cj\") pod \"heat-operator-controller-manager-bf4c6585d-ctkn6\" (UID: \"496286dc-00cb-42ae-914e-4d8769847726\") " pod="openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6"
Nov 21 14:21:23 crc kubenswrapper[4774]: I1121 14:21:23.979712 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fx6xk\" (UniqueName: \"kubernetes.io/projected/d22140fe-1eb2-4f64-84e5-1d3ad3902a94-kube-api-access-fx6xk\") pod \"horizon-operator-controller-manager-5d86b44686-c8wfq\" (UID: \"d22140fe-1eb2-4f64-84e5-1d3ad3902a94\") " pod="openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.016767 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.017384 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.034171 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qdb4\" (UniqueName: \"kubernetes.io/projected/c2a3d34a-eca8-4106-8a2b-47254b1af44b-kube-api-access-7qdb4\") pod \"glance-operator-controller-manager-8667fbf6f6-mx2n9\" (UID: \"c2a3d34a-eca8-4106-8a2b-47254b1af44b\") " pod="openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.042975 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.047301 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.048954 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.053201 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-n4w8j"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.072124 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.079032 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.080716 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.085223 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk6dq\" (UniqueName: \"kubernetes.io/projected/7e09cfc1-a56d-49fb-ac6f-f9007b4a4128-kube-api-access-kk6dq\") pod \"infra-operator-controller-manager-769d9c7585-7hqmf\" (UID: \"7e09cfc1-a56d-49fb-ac6f-f9007b4a4128\") " pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.094859 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xh7cj\" (UniqueName: \"kubernetes.io/projected/496286dc-00cb-42ae-914e-4d8769847726-kube-api-access-xh7cj\") pod \"heat-operator-controller-manager-bf4c6585d-ctkn6\" (UID: \"496286dc-00cb-42ae-914e-4d8769847726\") " pod="openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.095973 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fx6xk\" (UniqueName: \"kubernetes.io/projected/d22140fe-1eb2-4f64-84e5-1d3ad3902a94-kube-api-access-fx6xk\") pod \"horizon-operator-controller-manager-5d86b44686-c8wfq\" (UID: \"d22140fe-1eb2-4f64-84e5-1d3ad3902a94\") " pod="openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.097627 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7e09cfc1-a56d-49fb-ac6f-f9007b4a4128-cert\") pod \"infra-operator-controller-manager-769d9c7585-7hqmf\" (UID: \"7e09cfc1-a56d-49fb-ac6f-f9007b4a4128\") " pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.098184 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8b4p\" (UniqueName: \"kubernetes.io/projected/340633c8-9873-455b-9ad3-617764d7f1ad-kube-api-access-p8b4p\") pod \"ironic-operator-controller-manager-5c75d7c94b-9j9tx\" (UID: \"340633c8-9873-455b-9ad3-617764d7f1ad\") " pod="openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.103756 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.111490 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-z7g4k"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.162712 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xh7cj\" (UniqueName: \"kubernetes.io/projected/496286dc-00cb-42ae-914e-4d8769847726-kube-api-access-xh7cj\") pod \"heat-operator-controller-manager-bf4c6585d-ctkn6\" (UID: \"496286dc-00cb-42ae-914e-4d8769847726\") " pod="openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.187860 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.200278 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7e09cfc1-a56d-49fb-ac6f-f9007b4a4128-cert\") pod \"infra-operator-controller-manager-769d9c7585-7hqmf\" (UID: \"7e09cfc1-a56d-49fb-ac6f-f9007b4a4128\") " pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.200338 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8b4p\" (UniqueName: \"kubernetes.io/projected/340633c8-9873-455b-9ad3-617764d7f1ad-kube-api-access-p8b4p\") pod \"ironic-operator-controller-manager-5c75d7c94b-9j9tx\" (UID: \"340633c8-9873-455b-9ad3-617764d7f1ad\") " pod="openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.200371 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b7nb\" (UniqueName: \"kubernetes.io/projected/47c58999-f804-4a5c-bcc7-3aae79eab6da-kube-api-access-2b7nb\") pod \"manila-operator-controller-manager-7bb88cb858-7xd6m\" (UID: \"47c58999-f804-4a5c-bcc7-3aae79eab6da\") " pod="openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.200418 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk6dq\" (UniqueName: \"kubernetes.io/projected/7e09cfc1-a56d-49fb-ac6f-f9007b4a4128-kube-api-access-kk6dq\") pod \"infra-operator-controller-manager-769d9c7585-7hqmf\" (UID: \"7e09cfc1-a56d-49fb-ac6f-f9007b4a4128\") " pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.200608 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qqhm\" (UniqueName: \"kubernetes.io/projected/881c2298-a491-4657-9982-55fe889c9b4f-kube-api-access-2qqhm\") pod \"keystone-operator-controller-manager-7879fb76fd-7qkkw\" (UID: \"881c2298-a491-4657-9982-55fe889c9b4f\") " pod="openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.204416 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fx6xk\" (UniqueName: \"kubernetes.io/projected/d22140fe-1eb2-4f64-84e5-1d3ad3902a94-kube-api-access-fx6xk\") pod \"horizon-operator-controller-manager-5d86b44686-c8wfq\" (UID: \"d22140fe-1eb2-4f64-84e5-1d3ad3902a94\") " pod="openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.223169 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7e09cfc1-a56d-49fb-ac6f-f9007b4a4128-cert\") pod \"infra-operator-controller-manager-769d9c7585-7hqmf\" (UID: \"7e09cfc1-a56d-49fb-ac6f-f9007b4a4128\") " pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.241699 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8b4p\" (UniqueName: \"kubernetes.io/projected/340633c8-9873-455b-9ad3-617764d7f1ad-kube-api-access-p8b4p\") pod \"ironic-operator-controller-manager-5c75d7c94b-9j9tx\" (UID: \"340633c8-9873-455b-9ad3-617764d7f1ad\") " pod="openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.254788 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.265511 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk6dq\" (UniqueName: \"kubernetes.io/projected/7e09cfc1-a56d-49fb-ac6f-f9007b4a4128-kube-api-access-kk6dq\") pod \"infra-operator-controller-manager-769d9c7585-7hqmf\" (UID: \"7e09cfc1-a56d-49fb-ac6f-f9007b4a4128\") " pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.289427 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.289475 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.289493 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.290358 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.295061 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.295350 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.297679 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.297919 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-qf8x8"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.298674 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.299555 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.303060 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-8tvf4"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.303356 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-hssm5"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.305947 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-x6s8t"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.306606 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.308614 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b7nb\" (UniqueName: \"kubernetes.io/projected/47c58999-f804-4a5c-bcc7-3aae79eab6da-kube-api-access-2b7nb\") pod \"manila-operator-controller-manager-7bb88cb858-7xd6m\" (UID: \"47c58999-f804-4a5c-bcc7-3aae79eab6da\") " pod="openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.308861 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qqhm\" (UniqueName: \"kubernetes.io/projected/881c2298-a491-4657-9982-55fe889c9b4f-kube-api-access-2qqhm\") pod \"keystone-operator-controller-manager-7879fb76fd-7qkkw\" (UID: \"881c2298-a491-4657-9982-55fe889c9b4f\") " pod="openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.312418 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.351914 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qqhm\" (UniqueName: \"kubernetes.io/projected/881c2298-a491-4657-9982-55fe889c9b4f-kube-api-access-2qqhm\") pod \"keystone-operator-controller-manager-7879fb76fd-7qkkw\" (UID: \"881c2298-a491-4657-9982-55fe889c9b4f\") " pod="openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.359952 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b7nb\" (UniqueName: \"kubernetes.io/projected/47c58999-f804-4a5c-bcc7-3aae79eab6da-kube-api-access-2b7nb\") pod \"manila-operator-controller-manager-7bb88cb858-7xd6m\" (UID: \"47c58999-f804-4a5c-bcc7-3aae79eab6da\") " pod="openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.412925 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jk4n\" (UniqueName: \"kubernetes.io/projected/5644bfb5-eee6-4ecd-976c-00ae40333bf3-kube-api-access-8jk4n\") pod \"octavia-operator-controller-manager-6fdc856c5d-d5wzw\" (UID: \"5644bfb5-eee6-4ecd-976c-00ae40333bf3\") " pod="openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.413105 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7n545\" (UniqueName: \"kubernetes.io/projected/4e5dbca6-27a9-4c2b-81e1-4a062af18fa2-kube-api-access-7n545\") pod \"nova-operator-controller-manager-86d796d84d-87ktc\" (UID: \"4e5dbca6-27a9-4c2b-81e1-4a062af18fa2\") " pod="openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.413144 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmrxp\" (UniqueName: \"kubernetes.io/projected/2f2e330f-c352-4cd9-afd0-bf306e99fb39-kube-api-access-wmrxp\") pod \"mariadb-operator-controller-manager-6f8c5b86cb-8qwvm\" (UID: \"2f2e330f-c352-4cd9-afd0-bf306e99fb39\") " pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.413219 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x274\" (UniqueName: \"kubernetes.io/projected/c63b62b2-ef4d-4138-9d5c-e7b087ac25ce-kube-api-access-9x274\") pod \"neutron-operator-controller-manager-66b7d6f598-x8k99\" (UID: \"c63b62b2-ef4d-4138-9d5c-e7b087ac25ce\") " pod="openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.424876 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.452488 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.475307 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.480641 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.503689 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.505684 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.514349 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.515077 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.515282 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-4wk7w"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.515849 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jk4n\" (UniqueName: \"kubernetes.io/projected/5644bfb5-eee6-4ecd-976c-00ae40333bf3-kube-api-access-8jk4n\") pod \"octavia-operator-controller-manager-6fdc856c5d-d5wzw\" (UID: \"5644bfb5-eee6-4ecd-976c-00ae40333bf3\") " pod="openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.515944 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7n545\" (UniqueName: \"kubernetes.io/projected/4e5dbca6-27a9-4c2b-81e1-4a062af18fa2-kube-api-access-7n545\") pod \"nova-operator-controller-manager-86d796d84d-87ktc\" (UID: \"4e5dbca6-27a9-4c2b-81e1-4a062af18fa2\") " pod="openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.515971 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmrxp\" (UniqueName: \"kubernetes.io/projected/2f2e330f-c352-4cd9-afd0-bf306e99fb39-kube-api-access-wmrxp\") pod \"mariadb-operator-controller-manager-6f8c5b86cb-8qwvm\" (UID: \"2f2e330f-c352-4cd9-afd0-bf306e99fb39\") " pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.516012 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x274\" (UniqueName: \"kubernetes.io/projected/c63b62b2-ef4d-4138-9d5c-e7b087ac25ce-kube-api-access-9x274\") pod \"neutron-operator-controller-manager-66b7d6f598-x8k99\" (UID: \"c63b62b2-ef4d-4138-9d5c-e7b087ac25ce\") " pod="openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.519372 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.522054 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.529283 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-f8cqj"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.529517 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.538773 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7n545\" (UniqueName: \"kubernetes.io/projected/4e5dbca6-27a9-4c2b-81e1-4a062af18fa2-kube-api-access-7n545\") pod \"nova-operator-controller-manager-86d796d84d-87ktc\" (UID: \"4e5dbca6-27a9-4c2b-81e1-4a062af18fa2\") " pod="openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.542790 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.543532 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmrxp\" (UniqueName: \"kubernetes.io/projected/2f2e330f-c352-4cd9-afd0-bf306e99fb39-kube-api-access-wmrxp\") pod \"mariadb-operator-controller-manager-6f8c5b86cb-8qwvm\" (UID: \"2f2e330f-c352-4cd9-afd0-bf306e99fb39\") " pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.547456 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.548988 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.550028 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x274\" (UniqueName: \"kubernetes.io/projected/c63b62b2-ef4d-4138-9d5c-e7b087ac25ce-kube-api-access-9x274\") pod \"neutron-operator-controller-manager-66b7d6f598-x8k99\" (UID: \"c63b62b2-ef4d-4138-9d5c-e7b087ac25ce\") " pod="openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.552687 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-9dggm"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.553347 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.556847 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.557308 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jk4n\" (UniqueName: \"kubernetes.io/projected/5644bfb5-eee6-4ecd-976c-00ae40333bf3-kube-api-access-8jk4n\") pod \"octavia-operator-controller-manager-6fdc856c5d-d5wzw\" (UID: \"5644bfb5-eee6-4ecd-976c-00ae40333bf3\") " pod="openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.563146 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-rjwvm"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.563328 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.564469 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.565082 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.568399 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-hfmv2"
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.574342 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.578666 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.590215 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.594122 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz"]
Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.595388 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.601035 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz"] Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.604275 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-x4mc7" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.609261 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r"] Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.618457 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmbtq\" (UniqueName: \"kubernetes.io/projected/62889a75-0d04-4f7f-b03e-225eaee9ce86-kube-api-access-vmbtq\") pod \"placement-operator-controller-manager-6dc664666c-z95rc\" (UID: \"62889a75-0d04-4f7f-b03e-225eaee9ce86\") " pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.618514 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fc7b50ee-89ac-491f-8dfd-23a32ccf9e82-cert\") pod \"openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk\" (UID: \"fc7b50ee-89ac-491f-8dfd-23a32ccf9e82\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.618545 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjzft\" (UniqueName: \"kubernetes.io/projected/e304c11a-e256-4c84-a317-b8b7eadd767a-kube-api-access-xjzft\") pod \"ovn-operator-controller-manager-5bdf4f7f7f-mz68p\" (UID: \"e304c11a-e256-4c84-a317-b8b7eadd767a\") " pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.618574 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8szws\" (UniqueName: \"kubernetes.io/projected/fc7b50ee-89ac-491f-8dfd-23a32ccf9e82-kube-api-access-8szws\") pod \"openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk\" (UID: \"fc7b50ee-89ac-491f-8dfd-23a32ccf9e82\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.618621 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlbdz\" (UniqueName: \"kubernetes.io/projected/53fd7850-6e67-4a0f-88c5-ecb3870ce1aa-kube-api-access-vlbdz\") pod \"swift-operator-controller-manager-799cb6ffd6-gxhk7\" (UID: \"53fd7850-6e67-4a0f-88c5-ecb3870ce1aa\") " pod="openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.618648 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdkck\" (UniqueName: \"kubernetes.io/projected/d60cbec8-ea75-476b-b4a2-9ff3272a11c1-kube-api-access-kdkck\") pod \"telemetry-operator-controller-manager-7798859c74-ksx5x\" (UID: \"d60cbec8-ea75-476b-b4a2-9ff3272a11c1\") " 
pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.621208 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r"] Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.621298 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.624465 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-t7b4q" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.685027 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79"] Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.686493 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.688071 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.696080 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.696724 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-knclb" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.708327 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.722017 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8szws\" (UniqueName: \"kubernetes.io/projected/fc7b50ee-89ac-491f-8dfd-23a32ccf9e82-kube-api-access-8szws\") pod \"openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk\" (UID: \"fc7b50ee-89ac-491f-8dfd-23a32ccf9e82\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.722112 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlbdz\" (UniqueName: \"kubernetes.io/projected/53fd7850-6e67-4a0f-88c5-ecb3870ce1aa-kube-api-access-vlbdz\") pod \"swift-operator-controller-manager-799cb6ffd6-gxhk7\" (UID: \"53fd7850-6e67-4a0f-88c5-ecb3870ce1aa\") " pod="openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.722160 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knphx\" (UniqueName: \"kubernetes.io/projected/3d0b08e2-b552-4a9a-a87d-bfae30f9045d-kube-api-access-knphx\") pod \"watcher-operator-controller-manager-7cd4fb6f79-mtc8r\" (UID: \"3d0b08e2-b552-4a9a-a87d-bfae30f9045d\") " pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.722193 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdkck\" (UniqueName: \"kubernetes.io/projected/d60cbec8-ea75-476b-b4a2-9ff3272a11c1-kube-api-access-kdkck\") pod \"telemetry-operator-controller-manager-7798859c74-ksx5x\" (UID: \"d60cbec8-ea75-476b-b4a2-9ff3272a11c1\") " pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.722230 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4s7l\" (UniqueName: \"kubernetes.io/projected/1c1c222a-bb01-48b9-8115-1a4a35278047-kube-api-access-b4s7l\") pod \"test-operator-controller-manager-8464cf66df-8w5rz\" (UID: \"1c1c222a-bb01-48b9-8115-1a4a35278047\") " pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.722269 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmbtq\" (UniqueName: \"kubernetes.io/projected/62889a75-0d04-4f7f-b03e-225eaee9ce86-kube-api-access-vmbtq\") pod \"placement-operator-controller-manager-6dc664666c-z95rc\" (UID: \"62889a75-0d04-4f7f-b03e-225eaee9ce86\") " pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.722325 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fc7b50ee-89ac-491f-8dfd-23a32ccf9e82-cert\") pod \"openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk\" (UID: \"fc7b50ee-89ac-491f-8dfd-23a32ccf9e82\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.722344 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjzft\" (UniqueName: 
\"kubernetes.io/projected/e304c11a-e256-4c84-a317-b8b7eadd767a-kube-api-access-xjzft\") pod \"ovn-operator-controller-manager-5bdf4f7f7f-mz68p\" (UID: \"e304c11a-e256-4c84-a317-b8b7eadd767a\") " pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" Nov 21 14:21:24 crc kubenswrapper[4774]: E1121 14:21:24.725268 4774 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 21 14:21:24 crc kubenswrapper[4774]: E1121 14:21:24.725391 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc7b50ee-89ac-491f-8dfd-23a32ccf9e82-cert podName:fc7b50ee-89ac-491f-8dfd-23a32ccf9e82 nodeName:}" failed. No retries permitted until 2025-11-21 14:21:25.225358711 +0000 UTC m=+1075.877557970 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fc7b50ee-89ac-491f-8dfd-23a32ccf9e82-cert") pod "openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" (UID: "fc7b50ee-89ac-491f-8dfd-23a32ccf9e82") : secret "openstack-baremetal-operator-webhook-server-cert" not found Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.748341 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79"] Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.758492 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.758921 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s"] Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.761117 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.765115 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-fdm8q" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.768455 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8szws\" (UniqueName: \"kubernetes.io/projected/fc7b50ee-89ac-491f-8dfd-23a32ccf9e82-kube-api-access-8szws\") pod \"openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk\" (UID: \"fc7b50ee-89ac-491f-8dfd-23a32ccf9e82\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.768771 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.773027 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlbdz\" (UniqueName: \"kubernetes.io/projected/53fd7850-6e67-4a0f-88c5-ecb3870ce1aa-kube-api-access-vlbdz\") pod \"swift-operator-controller-manager-799cb6ffd6-gxhk7\" (UID: \"53fd7850-6e67-4a0f-88c5-ecb3870ce1aa\") " pod="openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.773137 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s"] Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.779096 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdkck\" (UniqueName: \"kubernetes.io/projected/d60cbec8-ea75-476b-b4a2-9ff3272a11c1-kube-api-access-kdkck\") pod \"telemetry-operator-controller-manager-7798859c74-ksx5x\" (UID: \"d60cbec8-ea75-476b-b4a2-9ff3272a11c1\") " pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.794676 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmbtq\" (UniqueName: \"kubernetes.io/projected/62889a75-0d04-4f7f-b03e-225eaee9ce86-kube-api-access-vmbtq\") pod \"placement-operator-controller-manager-6dc664666c-z95rc\" (UID: \"62889a75-0d04-4f7f-b03e-225eaee9ce86\") " pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.801977 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjzft\" (UniqueName: \"kubernetes.io/projected/e304c11a-e256-4c84-a317-b8b7eadd767a-kube-api-access-xjzft\") pod \"ovn-operator-controller-manager-5bdf4f7f7f-mz68p\" (UID: \"e304c11a-e256-4c84-a317-b8b7eadd767a\") " pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.825757 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knphx\" (UniqueName: \"kubernetes.io/projected/3d0b08e2-b552-4a9a-a87d-bfae30f9045d-kube-api-access-knphx\") pod \"watcher-operator-controller-manager-7cd4fb6f79-mtc8r\" (UID: \"3d0b08e2-b552-4a9a-a87d-bfae30f9045d\") " pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.825849 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/efa3c204-339b-4adc-ba7f-614c918c7873-cert\") pod \"openstack-operator-controller-manager-7755d5f8cc-9cc79\" (UID: \"efa3c204-339b-4adc-ba7f-614c918c7873\") " pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.825885 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62wbh\" (UniqueName: \"kubernetes.io/projected/70619ac6-0265-4fe9-aad9-f9e4797ac7f9-kube-api-access-62wbh\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s\" (UID: \"70619ac6-0265-4fe9-aad9-f9e4797ac7f9\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.825929 
4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4s7l\" (UniqueName: \"kubernetes.io/projected/1c1c222a-bb01-48b9-8115-1a4a35278047-kube-api-access-b4s7l\") pod \"test-operator-controller-manager-8464cf66df-8w5rz\" (UID: \"1c1c222a-bb01-48b9-8115-1a4a35278047\") " pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.826063 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6flc2\" (UniqueName: \"kubernetes.io/projected/efa3c204-339b-4adc-ba7f-614c918c7873-kube-api-access-6flc2\") pod \"openstack-operator-controller-manager-7755d5f8cc-9cc79\" (UID: \"efa3c204-339b-4adc-ba7f-614c918c7873\") " pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.854112 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4s7l\" (UniqueName: \"kubernetes.io/projected/1c1c222a-bb01-48b9-8115-1a4a35278047-kube-api-access-b4s7l\") pod \"test-operator-controller-manager-8464cf66df-8w5rz\" (UID: \"1c1c222a-bb01-48b9-8115-1a4a35278047\") " pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.855176 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knphx\" (UniqueName: \"kubernetes.io/projected/3d0b08e2-b552-4a9a-a87d-bfae30f9045d-kube-api-access-knphx\") pod \"watcher-operator-controller-manager-7cd4fb6f79-mtc8r\" (UID: \"3d0b08e2-b552-4a9a-a87d-bfae30f9045d\") " pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.895104 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.928198 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/efa3c204-339b-4adc-ba7f-614c918c7873-cert\") pod \"openstack-operator-controller-manager-7755d5f8cc-9cc79\" (UID: \"efa3c204-339b-4adc-ba7f-614c918c7873\") " pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.928258 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62wbh\" (UniqueName: \"kubernetes.io/projected/70619ac6-0265-4fe9-aad9-f9e4797ac7f9-kube-api-access-62wbh\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s\" (UID: \"70619ac6-0265-4fe9-aad9-f9e4797ac7f9\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.928371 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6flc2\" (UniqueName: \"kubernetes.io/projected/efa3c204-339b-4adc-ba7f-614c918c7873-kube-api-access-6flc2\") pod \"openstack-operator-controller-manager-7755d5f8cc-9cc79\" (UID: \"efa3c204-339b-4adc-ba7f-614c918c7873\") " pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.929091 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.956171 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6flc2\" (UniqueName: \"kubernetes.io/projected/efa3c204-339b-4adc-ba7f-614c918c7873-kube-api-access-6flc2\") pod \"openstack-operator-controller-manager-7755d5f8cc-9cc79\" (UID: \"efa3c204-339b-4adc-ba7f-614c918c7873\") " pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.958928 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/efa3c204-339b-4adc-ba7f-614c918c7873-cert\") pod \"openstack-operator-controller-manager-7755d5f8cc-9cc79\" (UID: \"efa3c204-339b-4adc-ba7f-614c918c7873\") " pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.993079 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.998444 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62wbh\" (UniqueName: \"kubernetes.io/projected/70619ac6-0265-4fe9-aad9-f9e4797ac7f9-kube-api-access-62wbh\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s\" (UID: \"70619ac6-0265-4fe9-aad9-f9e4797ac7f9\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s" Nov 21 14:21:24 crc kubenswrapper[4774]: I1121 14:21:24.998564 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr"] Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.027029 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.039927 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.078471 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr" event={"ID":"c28fdb9b-2f84-41f0-ae41-977dca177484","Type":"ContainerStarted","Data":"4afb3bba9ff18d80a720f98b94a715ecfeb6fe41f319128e2126d91a89ff90b3"} Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.107940 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.123553 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6"] Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.124209 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.130316 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9"] Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.149097 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s" Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.235594 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fc7b50ee-89ac-491f-8dfd-23a32ccf9e82-cert\") pod \"openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk\" (UID: \"fc7b50ee-89ac-491f-8dfd-23a32ccf9e82\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.243681 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fc7b50ee-89ac-491f-8dfd-23a32ccf9e82-cert\") pod \"openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk\" (UID: \"fc7b50ee-89ac-491f-8dfd-23a32ccf9e82\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.484383 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.552139 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx"] Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.578221 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m"] Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.592298 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw"] Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.610097 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx"] Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.618747 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq"] Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.627811 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf"] Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.633566 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt"] Nov 21 14:21:25 crc kubenswrapper[4774]: W1121 14:21:25.666043 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47c58999_f804_4a5c_bcc7_3aae79eab6da.slice/crio-84de826911672aad86f07a3ac0c06aec7e61b13f6de9b807490867a63939819e WatchSource:0}: Error finding container 84de826911672aad86f07a3ac0c06aec7e61b13f6de9b807490867a63939819e: Status 404 returned error can't find the container with id 
84de826911672aad86f07a3ac0c06aec7e61b13f6de9b807490867a63939819e Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.767349 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc"] Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.774586 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw"] Nov 21 14:21:25 crc kubenswrapper[4774]: W1121 14:21:25.775939 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5644bfb5_eee6_4ecd_976c_00ae40333bf3.slice/crio-3498ef097dd75b4f5fac250de2352fd52b46102944a92aee932aec3ba0e6d0f9 WatchSource:0}: Error finding container 3498ef097dd75b4f5fac250de2352fd52b46102944a92aee932aec3ba0e6d0f9: Status 404 returned error can't find the container with id 3498ef097dd75b4f5fac250de2352fd52b46102944a92aee932aec3ba0e6d0f9 Nov 21 14:21:25 crc kubenswrapper[4774]: W1121 14:21:25.784704 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e5dbca6_27a9_4c2b_81e1_4a062af18fa2.slice/crio-cfb18c62f91d899487bfe1fb33862950566b6d9b5e27d17e6b62a8c84f083dd9 WatchSource:0}: Error finding container cfb18c62f91d899487bfe1fb33862950566b6d9b5e27d17e6b62a8c84f083dd9: Status 404 returned error can't find the container with id cfb18c62f91d899487bfe1fb33862950566b6d9b5e27d17e6b62a8c84f083dd9 Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.791602 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99"] Nov 21 14:21:25 crc kubenswrapper[4774]: W1121 14:21:25.792966 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc63b62b2_ef4d_4138_9d5c_e7b087ac25ce.slice/crio-65e78296b8a134437013cb9d8152334617411d2a30c07c896c48124979cf1626 WatchSource:0}: Error finding container 65e78296b8a134437013cb9d8152334617411d2a30c07c896c48124979cf1626: Status 404 returned error can't find the container with id 65e78296b8a134437013cb9d8152334617411d2a30c07c896c48124979cf1626 Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.802647 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm"] Nov 21 14:21:25 crc kubenswrapper[4774]: E1121 14:21:25.809585 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:7b90521b9e9cb4eb43c2f1c3bf85dbd068d684315f4f705b07708dd078df9d04,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wmrxp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-6f8c5b86cb-8qwvm_openstack-operators(2f2e330f-c352-4cd9-afd0-bf306e99fb39): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.928837 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7"] Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.940401 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc"] Nov 21 14:21:25 crc kubenswrapper[4774]: E1121 14:21:25.943679 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vmbtq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-6dc664666c-z95rc_openstack-operators(62889a75-0d04-4f7f-b03e-225eaee9ce86): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 21 14:21:25 crc kubenswrapper[4774]: I1121 14:21:25.948604 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p"] Nov 21 14:21:25 crc kubenswrapper[4774]: W1121 14:21:25.962167 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode304c11a_e256_4c84_a317_b8b7eadd767a.slice/crio-04fda216baff6b8dda909609472c836cfd3e0465d365b64f37fa9d5346e69f4f WatchSource:0}: Error finding container 04fda216baff6b8dda909609472c836cfd3e0465d365b64f37fa9d5346e69f4f: Status 404 returned error can't find the container with id 04fda216baff6b8dda909609472c836cfd3e0465d365b64f37fa9d5346e69f4f Nov 21 14:21:25 crc kubenswrapper[4774]: E1121 14:21:25.968107 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:5d49d4594c66eda7b151746cc6e1d3c67c0129b4503eeb043a64ae8ec2da6a1b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xjzft,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-5bdf4f7f7f-mz68p_openstack-operators(e304c11a-e256-4c84-a317-b8b7eadd767a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.109866 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw" event={"ID":"5644bfb5-eee6-4ecd-976c-00ae40333bf3","Type":"ContainerStarted","Data":"3498ef097dd75b4f5fac250de2352fd52b46102944a92aee932aec3ba0e6d0f9"} Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.109932 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s"] Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.110391 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99" event={"ID":"c63b62b2-ef4d-4138-9d5c-e7b087ac25ce","Type":"ContainerStarted","Data":"65e78296b8a134437013cb9d8152334617411d2a30c07c896c48124979cf1626"} Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.116905 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" event={"ID":"e304c11a-e256-4c84-a317-b8b7eadd767a","Type":"ContainerStarted","Data":"04fda216baff6b8dda909609472c836cfd3e0465d365b64f37fa9d5346e69f4f"} Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.119986 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt" event={"ID":"f50b16b0-3430-4378-a32c-8d09f402108e","Type":"ContainerStarted","Data":"f5e2b5cd37a2671bdb62a83bb9a6ee091a4a26ffc7bbc588c06c7b40b97da564"} Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.128715 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79"] Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.131519 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq" event={"ID":"d22140fe-1eb2-4f64-84e5-1d3ad3902a94","Type":"ContainerStarted","Data":"802dbbbf6a7b9634aca1ce030800053b72200339a88b20259a916ed59f3a2a8b"} Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.142730 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x"] Nov 21 14:21:26 crc kubenswrapper[4774]: E1121 14:21:26.145295 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:82207e753574d4be246f86c4b074500d66cf20214aa80f0a8525cf3287a35e6d,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-b4s7l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-8464cf66df-8w5rz_openstack-operators(1c1c222a-bb01-48b9-8115-1a4a35278047): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.165610 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r"] Nov 21 14:21:26 crc kubenswrapper[4774]: E1121 14:21:26.172381 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm" podUID="2f2e330f-c352-4cd9-afd0-bf306e99fb39" Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.173409 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx" event={"ID":"340633c8-9873-455b-9ad3-617764d7f1ad","Type":"ContainerStarted","Data":"1b91b5462d7d91147fd895b46a7c6e95b8bd23819513528c1af53b28d09cbb25"} Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.175782 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7" 
event={"ID":"53fd7850-6e67-4a0f-88c5-ecb3870ce1aa","Type":"ContainerStarted","Data":"147dbbf3bb743e075a207a5294aee75410efe4e718331c5b80d618f988581ac0"} Nov 21 14:21:26 crc kubenswrapper[4774]: W1121 14:21:26.177078 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d0b08e2_b552_4a9a_a87d_bfae30f9045d.slice/crio-b5f86d3bda131e92d2727b33a7af111607a816a24d73a0d3a1128750df867ca0 WatchSource:0}: Error finding container b5f86d3bda131e92d2727b33a7af111607a816a24d73a0d3a1128750df867ca0: Status 404 returned error can't find the container with id b5f86d3bda131e92d2727b33a7af111607a816a24d73a0d3a1128750df867ca0 Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.179805 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6" event={"ID":"496286dc-00cb-42ae-914e-4d8769847726","Type":"ContainerStarted","Data":"fdaddabccf0af5c8cac344a38f00d9dcd2100aef91d2b122c62e6e9234c0751c"} Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.181911 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw" event={"ID":"881c2298-a491-4657-9982-55fe889c9b4f","Type":"ContainerStarted","Data":"afd20fad17b0e35789200590d000686608e87f0984e8191b9cf2678dd749eccf"} Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.184831 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx" event={"ID":"cf13e842-38d2-409c-87f8-3163868965d8","Type":"ContainerStarted","Data":"d85ea28c6e95bcfb111941d62f3d226e46c9f95b8ff3b80a28b38f6db51db447"} Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.185223 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz"] Nov 21 14:21:26 crc kubenswrapper[4774]: E1121 14:21:26.191227 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:5324a6d2f76fc3041023b0cbd09a733ef2b59f310d390e4d6483d219eb96494f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kdkck,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-7798859c74-ksx5x_openstack-operators(d60cbec8-ea75-476b-b4a2-9ff3272a11c1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.191360 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf" event={"ID":"7e09cfc1-a56d-49fb-ac6f-f9007b4a4128","Type":"ContainerStarted","Data":"ce4b340a633f2af79f0b4343b169b9901866e2338b3f2536f7d86b7c0238d7c2"} Nov 21 14:21:26 crc kubenswrapper[4774]: E1121 14:21:26.191373 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:4838402d41d42c56613d43dc5041aae475a2b18e6172491d6c4d4a78a580697f,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-knphx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-7cd4fb6f79-mtc8r_openstack-operators(3d0b08e2-b552-4a9a-a87d-bfae30f9045d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Nov 21 14:21:26 crc kubenswrapper[4774]: E1121 14:21:26.218346 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" podUID="62889a75-0d04-4f7f-b03e-225eaee9ce86" Nov 21 14:21:26 crc kubenswrapper[4774]: E1121 14:21:26.225785 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" podUID="e304c11a-e256-4c84-a317-b8b7eadd767a" Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.226065 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm" event={"ID":"2f2e330f-c352-4cd9-afd0-bf306e99fb39","Type":"ContainerStarted","Data":"739a63b141348c5f18df958f9eab029f7dcbca1df20046be497ab833bf2f6063"} Nov 21 14:21:26 crc kubenswrapper[4774]: E1121 14:21:26.230013 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:7b90521b9e9cb4eb43c2f1c3bf85dbd068d684315f4f705b07708dd078df9d04\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm" podUID="2f2e330f-c352-4cd9-afd0-bf306e99fb39" Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.241619 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" event={"ID":"62889a75-0d04-4f7f-b03e-225eaee9ce86","Type":"ContainerStarted","Data":"ae73ea7a99c3d69360fc1dbbe639398feb63d35caec8e8d9f6a687d15a3ec317"} Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.252003 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9" event={"ID":"c2a3d34a-eca8-4106-8a2b-47254b1af44b","Type":"ContainerStarted","Data":"8f157de6ed31140babb505de2efa1ed48cc582900200aa843664007c386d65dc"} Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.259109 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc" event={"ID":"4e5dbca6-27a9-4c2b-81e1-4a062af18fa2","Type":"ContainerStarted","Data":"cfb18c62f91d899487bfe1fb33862950566b6d9b5e27d17e6b62a8c84f083dd9"} Nov 21 14:21:26 crc kubenswrapper[4774]: 
I1121 14:21:26.261234 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m" event={"ID":"47c58999-f804-4a5c-bcc7-3aae79eab6da","Type":"ContainerStarted","Data":"84de826911672aad86f07a3ac0c06aec7e61b13f6de9b807490867a63939819e"} Nov 21 14:21:26 crc kubenswrapper[4774]: E1121 14:21:26.283899 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c\\\"\"" pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" podUID="62889a75-0d04-4f7f-b03e-225eaee9ce86" Nov 21 14:21:26 crc kubenswrapper[4774]: I1121 14:21:26.301052 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk"] Nov 21 14:21:26 crc kubenswrapper[4774]: E1121 14:21:26.453676 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" podUID="d60cbec8-ea75-476b-b4a2-9ff3272a11c1" Nov 21 14:21:26 crc kubenswrapper[4774]: E1121 14:21:26.479367 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" podUID="3d0b08e2-b552-4a9a-a87d-bfae30f9045d" Nov 21 14:21:26 crc kubenswrapper[4774]: E1121 14:21:26.516878 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" podUID="1c1c222a-bb01-48b9-8115-1a4a35278047" Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.284098 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" event={"ID":"efa3c204-339b-4adc-ba7f-614c918c7873","Type":"ContainerStarted","Data":"fc02ac86f6fa256a02f396640a3d36f5ff33d4db75d05a01cd0a6dc9c0cd6ff2"} Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.284630 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" event={"ID":"efa3c204-339b-4adc-ba7f-614c918c7873","Type":"ContainerStarted","Data":"05adfbe068ffc3acc50103b9b6544165d63fa2313c763c0fc27b9c07a950fbd1"} Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.284649 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" event={"ID":"efa3c204-339b-4adc-ba7f-614c918c7873","Type":"ContainerStarted","Data":"07201508df3e04039a788ee5429c9728a1b95f48d7dfa77c20ff1a97b873fa08"} Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.284777 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.289725 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" 
event={"ID":"e304c11a-e256-4c84-a317-b8b7eadd767a","Type":"ContainerStarted","Data":"607f43c341ebfbe1541c335b519e95cd355540f729295ac03d043fb57d07a437"} Nov 21 14:21:27 crc kubenswrapper[4774]: E1121 14:21:27.291644 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:5d49d4594c66eda7b151746cc6e1d3c67c0129b4503eeb043a64ae8ec2da6a1b\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" podUID="e304c11a-e256-4c84-a317-b8b7eadd767a" Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.296179 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" event={"ID":"d60cbec8-ea75-476b-b4a2-9ff3272a11c1","Type":"ContainerStarted","Data":"b3ee7e62a963588e7e82861fb427b50c63606999801b9d43e6dfc0726cd35e37"} Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.296251 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" event={"ID":"d60cbec8-ea75-476b-b4a2-9ff3272a11c1","Type":"ContainerStarted","Data":"a34ad90c18b96accbfb4cf916afbad7497eef260a71d8e4b6cc89871201f2425"} Nov 21 14:21:27 crc kubenswrapper[4774]: E1121 14:21:27.301139 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:5324a6d2f76fc3041023b0cbd09a733ef2b59f310d390e4d6483d219eb96494f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" podUID="d60cbec8-ea75-476b-b4a2-9ff3272a11c1" Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.302020 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" event={"ID":"fc7b50ee-89ac-491f-8dfd-23a32ccf9e82","Type":"ContainerStarted","Data":"d93b2f39c693f08dc7df8b9648aa2a96f16e8b0817d7b0eac7cb45f7a82a0a59"} Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.320857 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" podStartSLOduration=3.320796398 podStartE2EDuration="3.320796398s" podCreationTimestamp="2025-11-21 14:21:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:21:27.318878323 +0000 UTC m=+1077.971077582" watchObservedRunningTime="2025-11-21 14:21:27.320796398 +0000 UTC m=+1077.972995657" Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.328326 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" event={"ID":"3d0b08e2-b552-4a9a-a87d-bfae30f9045d","Type":"ContainerStarted","Data":"884ae5253a9ff2cefa0713a064d12ad2eb0b6e8c398cda162489e1f01c199ce5"} Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.328686 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" event={"ID":"3d0b08e2-b552-4a9a-a87d-bfae30f9045d","Type":"ContainerStarted","Data":"b5f86d3bda131e92d2727b33a7af111607a816a24d73a0d3a1128750df867ca0"} Nov 21 14:21:27 crc kubenswrapper[4774]: E1121 14:21:27.331839 4774 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:4838402d41d42c56613d43dc5041aae475a2b18e6172491d6c4d4a78a580697f\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" podUID="3d0b08e2-b552-4a9a-a87d-bfae30f9045d" Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.335127 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s" event={"ID":"70619ac6-0265-4fe9-aad9-f9e4797ac7f9","Type":"ContainerStarted","Data":"f4da20ac9cba9663dc9a54895eb35fc7073f2e2b94b730e3965d0b8b90881e1c"} Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.353541 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm" event={"ID":"2f2e330f-c352-4cd9-afd0-bf306e99fb39","Type":"ContainerStarted","Data":"51645577f9b3b16d8a8f12570a10e9350c4bfac0f7ef3ed545dff2d2b937d9e0"} Nov 21 14:21:27 crc kubenswrapper[4774]: E1121 14:21:27.366708 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:7b90521b9e9cb4eb43c2f1c3bf85dbd068d684315f4f705b07708dd078df9d04\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm" podUID="2f2e330f-c352-4cd9-afd0-bf306e99fb39" Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.370448 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" event={"ID":"62889a75-0d04-4f7f-b03e-225eaee9ce86","Type":"ContainerStarted","Data":"c9f6683b351dadb0ddfc53083bd763607f689c97312933edecdb6264d686ba10"} Nov 21 14:21:27 crc kubenswrapper[4774]: E1121 14:21:27.376095 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c\\\"\"" pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" podUID="62889a75-0d04-4f7f-b03e-225eaee9ce86" Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.389469 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" event={"ID":"1c1c222a-bb01-48b9-8115-1a4a35278047","Type":"ContainerStarted","Data":"d14f8c50cf61a15fd1edd3bc48e22973787c9e12b91fc29a70f2b43fd656a4ef"} Nov 21 14:21:27 crc kubenswrapper[4774]: I1121 14:21:27.389548 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" event={"ID":"1c1c222a-bb01-48b9-8115-1a4a35278047","Type":"ContainerStarted","Data":"aaf76592beeda9decaadfe6c77a00fb813c4dbfb175eac87bee3b7c10a9b2527"} Nov 21 14:21:27 crc kubenswrapper[4774]: E1121 14:21:27.396839 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:82207e753574d4be246f86c4b074500d66cf20214aa80f0a8525cf3287a35e6d\\\"\"" pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" podUID="1c1c222a-bb01-48b9-8115-1a4a35278047" Nov 21 
14:21:28 crc kubenswrapper[4774]: E1121 14:21:28.408666 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:4094e7fc11a33e8e2b6768a053cafaf5b122446d23f9113d43d520cb64e9776c\\\"\"" pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" podUID="62889a75-0d04-4f7f-b03e-225eaee9ce86" Nov 21 14:21:28 crc kubenswrapper[4774]: E1121 14:21:28.408738 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:5324a6d2f76fc3041023b0cbd09a733ef2b59f310d390e4d6483d219eb96494f\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" podUID="d60cbec8-ea75-476b-b4a2-9ff3272a11c1" Nov 21 14:21:28 crc kubenswrapper[4774]: E1121 14:21:28.408728 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:5d49d4594c66eda7b151746cc6e1d3c67c0129b4503eeb043a64ae8ec2da6a1b\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" podUID="e304c11a-e256-4c84-a317-b8b7eadd767a" Nov 21 14:21:28 crc kubenswrapper[4774]: E1121 14:21:28.408781 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:7b90521b9e9cb4eb43c2f1c3bf85dbd068d684315f4f705b07708dd078df9d04\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm" podUID="2f2e330f-c352-4cd9-afd0-bf306e99fb39" Nov 21 14:21:28 crc kubenswrapper[4774]: E1121 14:21:28.408867 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:82207e753574d4be246f86c4b074500d66cf20214aa80f0a8525cf3287a35e6d\\\"\"" pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" podUID="1c1c222a-bb01-48b9-8115-1a4a35278047" Nov 21 14:21:28 crc kubenswrapper[4774]: E1121 14:21:28.418295 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:4838402d41d42c56613d43dc5041aae475a2b18e6172491d6c4d4a78a580697f\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" podUID="3d0b08e2-b552-4a9a-a87d-bfae30f9045d" Nov 21 14:21:35 crc kubenswrapper[4774]: I1121 14:21:35.132379 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7755d5f8cc-9cc79" Nov 21 14:21:37 crc kubenswrapper[4774]: I1121 14:21:37.487865 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99" event={"ID":"c63b62b2-ef4d-4138-9d5c-e7b087ac25ce","Type":"ContainerStarted","Data":"ffc160484039cec941f747f0c3e57656d05b420b17ddb03d8db6b36f645435e3"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.604616 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx" event={"ID":"340633c8-9873-455b-9ad3-617764d7f1ad","Type":"ContainerStarted","Data":"13d2cd1442db4ecd2c344454643f969df89bc7aac8b2623397ad17c63ea307ef"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.627852 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99" event={"ID":"c63b62b2-ef4d-4138-9d5c-e7b087ac25ce","Type":"ContainerStarted","Data":"be17964cecd06d03c995ba8c822ce54e47326162465aa6f68658eb52909b7beb"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.629035 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99" Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.641259 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6" event={"ID":"496286dc-00cb-42ae-914e-4d8769847726","Type":"ContainerStarted","Data":"5aeaaf764814579a2a21f3875e991f97399f18436925b3e33f5172f2751acc6a"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.680473 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99" podStartSLOduration=4.309115325 podStartE2EDuration="15.680451954s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.794761174 +0000 UTC m=+1076.446960433" lastFinishedPulling="2025-11-21 14:21:37.166097803 +0000 UTC m=+1087.818297062" observedRunningTime="2025-11-21 14:21:38.677664035 +0000 UTC m=+1089.329863294" watchObservedRunningTime="2025-11-21 14:21:38.680451954 +0000 UTC m=+1089.332651213" Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.686800 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq" event={"ID":"d22140fe-1eb2-4f64-84e5-1d3ad3902a94","Type":"ContainerStarted","Data":"75ed05dc1e70b1464d2518fc663e8686eabfe7c94a7a34be8513ca56c4b25315"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.711631 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s" event={"ID":"70619ac6-0265-4fe9-aad9-f9e4797ac7f9","Type":"ContainerStarted","Data":"60e33584eb7f074c6ccdf492741ce4caed3f3cb8efd99ecef0c7503d470eed38"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.739342 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9" event={"ID":"c2a3d34a-eca8-4106-8a2b-47254b1af44b","Type":"ContainerStarted","Data":"5eefdbb165a85d60bd8ced87272b01cc62c084a187bbcc869c5931368ac602c2"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.753425 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s" podStartSLOduration=3.5387942199999998 podStartE2EDuration="14.753365992s" podCreationTimestamp="2025-11-21 14:21:24 +0000 UTC" firstStartedPulling="2025-11-21 14:21:26.134022491 +0000 UTC m=+1076.786221750" lastFinishedPulling="2025-11-21 14:21:37.348594263 +0000 UTC m=+1088.000793522" observedRunningTime="2025-11-21 14:21:38.744093858 +0000 UTC m=+1089.396293117" watchObservedRunningTime="2025-11-21 14:21:38.753365992 +0000 UTC m=+1089.405565251" Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 
14:21:38.758267 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc" event={"ID":"4e5dbca6-27a9-4c2b-81e1-4a062af18fa2","Type":"ContainerStarted","Data":"cd9fdcee051b47b703bd0091e24b352101e4db84af7a62b6353d93616d89238d"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.764785 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" event={"ID":"fc7b50ee-89ac-491f-8dfd-23a32ccf9e82","Type":"ContainerStarted","Data":"e7efa392f28f9e145eeb59cbcbfc0744b3e8c8ead667b58632f82fe45ecdb57a"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.787278 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt" event={"ID":"f50b16b0-3430-4378-a32c-8d09f402108e","Type":"ContainerStarted","Data":"589328e005995305ca84f95029b62c7bac05bc141be08dea22a50d709d0ec90b"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.795360 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr" event={"ID":"c28fdb9b-2f84-41f0-ae41-977dca177484","Type":"ContainerStarted","Data":"5fb0fd617826b5469835cc7e955d4d21213f4c7e85f33391bc066fcf4c9e1bd7"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.798690 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7" event={"ID":"53fd7850-6e67-4a0f-88c5-ecb3870ce1aa","Type":"ContainerStarted","Data":"5afbba02ad3858d10afded9ceba97af9039ef601a51390d4539f643ce77377ab"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.798870 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7" Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.800922 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx" event={"ID":"cf13e842-38d2-409c-87f8-3163868965d8","Type":"ContainerStarted","Data":"7dcdccc557f5fb1eba2fcf8ed83722bbe5049ad627822f34c9fbe1cca78c8f93"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.801439 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx" Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.815634 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw" event={"ID":"5644bfb5-eee6-4ecd-976c-00ae40333bf3","Type":"ContainerStarted","Data":"36e5fa851f8be36e00a2fe7c18a4ed89418a03dbb9c9c6a86f416ab8170a2b39"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.848881 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw" event={"ID":"881c2298-a491-4657-9982-55fe889c9b4f","Type":"ContainerStarted","Data":"5238e583563bf1447ff9f15e1d13176242846f40f64bd84a8f2c742a89e111d1"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.861638 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m" event={"ID":"47c58999-f804-4a5c-bcc7-3aae79eab6da","Type":"ContainerStarted","Data":"06c55ebe75c5c965f61649c26f3857db22e14a65b70d898ae618684aa2dca024"} Nov 21 14:21:38 crc 
kubenswrapper[4774]: I1121 14:21:38.867755 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf" event={"ID":"7e09cfc1-a56d-49fb-ac6f-f9007b4a4128","Type":"ContainerStarted","Data":"825ecee0fa2dc846f09c1e00be919759e010d647b6aa6e93a000dc87b882e349"} Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.869252 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7" podStartSLOduration=3.63075869 podStartE2EDuration="14.869216673s" podCreationTimestamp="2025-11-21 14:21:24 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.933124396 +0000 UTC m=+1076.585323655" lastFinishedPulling="2025-11-21 14:21:37.171582379 +0000 UTC m=+1087.823781638" observedRunningTime="2025-11-21 14:21:38.844116498 +0000 UTC m=+1089.496315757" watchObservedRunningTime="2025-11-21 14:21:38.869216673 +0000 UTC m=+1089.521415942" Nov 21 14:21:38 crc kubenswrapper[4774]: I1121 14:21:38.886387 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx" podStartSLOduration=4.37175644 podStartE2EDuration="15.886348592s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.652764567 +0000 UTC m=+1076.304963826" lastFinishedPulling="2025-11-21 14:21:37.167356719 +0000 UTC m=+1087.819555978" observedRunningTime="2025-11-21 14:21:38.873001801 +0000 UTC m=+1089.525201060" watchObservedRunningTime="2025-11-21 14:21:38.886348592 +0000 UTC m=+1089.538547841" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.881270 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr" event={"ID":"c28fdb9b-2f84-41f0-ae41-977dca177484","Type":"ContainerStarted","Data":"08d9fc66cdaaab05dd011981be4789673912739b923279cc9d8925d077731392"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.881874 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.885839 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw" event={"ID":"5644bfb5-eee6-4ecd-976c-00ae40333bf3","Type":"ContainerStarted","Data":"2f6235c3f099a854f77eba0821b1ff773aec5c527f32b87c24e14efd4fac9a0b"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.885997 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.890307 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc" event={"ID":"4e5dbca6-27a9-4c2b-81e1-4a062af18fa2","Type":"ContainerStarted","Data":"bcefe1ce56576923f822278af16fb06a4fdc142a77cf62a3373fe3490d984141"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.891017 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.894275 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6" 
event={"ID":"496286dc-00cb-42ae-914e-4d8769847726","Type":"ContainerStarted","Data":"202473332d166280de853ced309d877fe9b78c479781933588b2917291e6d530"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.894448 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.897695 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw" event={"ID":"881c2298-a491-4657-9982-55fe889c9b4f","Type":"ContainerStarted","Data":"db486ba0513a0d46d7b22f2a382f8a779636e4a499efb76092bd09f743e9b031"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.897807 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.915184 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr" podStartSLOduration=4.65912469 podStartE2EDuration="16.915144338s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:24.910088825 +0000 UTC m=+1075.562288084" lastFinishedPulling="2025-11-21 14:21:37.166108463 +0000 UTC m=+1087.818307732" observedRunningTime="2025-11-21 14:21:39.901244572 +0000 UTC m=+1090.553443841" watchObservedRunningTime="2025-11-21 14:21:39.915144338 +0000 UTC m=+1090.567343597" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.917361 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx" event={"ID":"cf13e842-38d2-409c-87f8-3163868965d8","Type":"ContainerStarted","Data":"981a3d21b43debca06786eda9c0480f7e57f849d79cebd0a4632aecd37d9a48b"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.924315 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9" event={"ID":"c2a3d34a-eca8-4106-8a2b-47254b1af44b","Type":"ContainerStarted","Data":"3ededcc43acab4381530a80f9781f8060936db2c97d62ef1ffa11277952ee1e0"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.924980 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.927863 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7" event={"ID":"53fd7850-6e67-4a0f-88c5-ecb3870ce1aa","Type":"ContainerStarted","Data":"61e17a9dbe0613dcff4789a3f878080b86e9f61098a16318b8fb7318f62c6880"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.931722 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf" event={"ID":"7e09cfc1-a56d-49fb-ac6f-f9007b4a4128","Type":"ContainerStarted","Data":"45e3cc7434160301bbee878eab78a9b1e05b69d8ceffac353b96010a323a6e8f"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.932551 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.943238 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw" podStartSLOduration=5.555197883 podStartE2EDuration="16.943219288s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.784241984 +0000 UTC m=+1076.436441243" lastFinishedPulling="2025-11-21 14:21:37.172263389 +0000 UTC m=+1087.824462648" observedRunningTime="2025-11-21 14:21:39.937757942 +0000 UTC m=+1090.589957191" watchObservedRunningTime="2025-11-21 14:21:39.943219288 +0000 UTC m=+1090.595418547" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.947237 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq" event={"ID":"d22140fe-1eb2-4f64-84e5-1d3ad3902a94","Type":"ContainerStarted","Data":"b3908dbc6d2f706e08ef955c6702abc84c9562bb51fc02e5ab6aae032b5ba10e"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.947502 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.951178 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx" event={"ID":"340633c8-9873-455b-9ad3-617764d7f1ad","Type":"ContainerStarted","Data":"6a0bdfff661f1076d9a36b19a5406cc19a760885946a7a86f360ab59b2aaffa2"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.952115 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.966235 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw" podStartSLOduration=5.440070003 podStartE2EDuration="16.966202353s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.635368172 +0000 UTC m=+1076.287567431" lastFinishedPulling="2025-11-21 14:21:37.161500522 +0000 UTC m=+1087.813699781" observedRunningTime="2025-11-21 14:21:39.958711589 +0000 UTC m=+1090.610910848" watchObservedRunningTime="2025-11-21 14:21:39.966202353 +0000 UTC m=+1090.618401612" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.966924 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m" event={"ID":"47c58999-f804-4a5c-bcc7-3aae79eab6da","Type":"ContainerStarted","Data":"e0d69c8c8dc2b7088551e7caffcf8e007e754e4cf1cbf05acdee8fdd536f899e"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.967281 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.969682 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" event={"ID":"fc7b50ee-89ac-491f-8dfd-23a32ccf9e82","Type":"ContainerStarted","Data":"f600b1ff614031a22c5a9af7af3359ea04dad90cb56a0f2ae1c8f04a34f34a7d"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.970230 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.974057 4774 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt" event={"ID":"f50b16b0-3430-4378-a32c-8d09f402108e","Type":"ContainerStarted","Data":"273d35bd25701b4bc4e1cc2f2cd49547c0fec3b945cf4dc6f747f178ac446d16"} Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.975183 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt" Nov 21 14:21:39 crc kubenswrapper[4774]: I1121 14:21:39.978127 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc" podStartSLOduration=5.596904221 podStartE2EDuration="16.978084791s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.787303111 +0000 UTC m=+1076.439502370" lastFinishedPulling="2025-11-21 14:21:37.168483681 +0000 UTC m=+1087.820682940" observedRunningTime="2025-11-21 14:21:39.976870606 +0000 UTC m=+1090.629069875" watchObservedRunningTime="2025-11-21 14:21:39.978084791 +0000 UTC m=+1090.630284080" Nov 21 14:21:40 crc kubenswrapper[4774]: I1121 14:21:40.033328 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf" podStartSLOduration=5.501339469 podStartE2EDuration="17.033311385s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.634138387 +0000 UTC m=+1076.286337656" lastFinishedPulling="2025-11-21 14:21:37.166110313 +0000 UTC m=+1087.818309572" observedRunningTime="2025-11-21 14:21:40.030154925 +0000 UTC m=+1090.682354204" watchObservedRunningTime="2025-11-21 14:21:40.033311385 +0000 UTC m=+1090.685510644" Nov 21 14:21:40 crc kubenswrapper[4774]: I1121 14:21:40.036670 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6" podStartSLOduration=5.040526947 podStartE2EDuration="17.03665849s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.179628295 +0000 UTC m=+1075.831827544" lastFinishedPulling="2025-11-21 14:21:37.175759818 +0000 UTC m=+1087.827959087" observedRunningTime="2025-11-21 14:21:40.012231344 +0000 UTC m=+1090.664430623" watchObservedRunningTime="2025-11-21 14:21:40.03665849 +0000 UTC m=+1090.688857779" Nov 21 14:21:40 crc kubenswrapper[4774]: I1121 14:21:40.055297 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9" podStartSLOduration=5.068814132 podStartE2EDuration="17.05526671s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.179671446 +0000 UTC m=+1075.831870705" lastFinishedPulling="2025-11-21 14:21:37.166124024 +0000 UTC m=+1087.818323283" observedRunningTime="2025-11-21 14:21:40.046622724 +0000 UTC m=+1090.698821983" watchObservedRunningTime="2025-11-21 14:21:40.05526671 +0000 UTC m=+1090.707465969" Nov 21 14:21:40 crc kubenswrapper[4774]: I1121 14:21:40.073812 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt" podStartSLOduration=5.571089715 podStartE2EDuration="17.073784868s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.664695617 +0000 UTC m=+1076.316894876" lastFinishedPulling="2025-11-21 14:21:37.16739077 
+0000 UTC m=+1087.819590029" observedRunningTime="2025-11-21 14:21:40.067400666 +0000 UTC m=+1090.719599945" watchObservedRunningTime="2025-11-21 14:21:40.073784868 +0000 UTC m=+1090.725984127" Nov 21 14:21:40 crc kubenswrapper[4774]: I1121 14:21:40.096206 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq" podStartSLOduration=5.561600386 podStartE2EDuration="17.096171776s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.6366948 +0000 UTC m=+1076.288894059" lastFinishedPulling="2025-11-21 14:21:37.17126619 +0000 UTC m=+1087.823465449" observedRunningTime="2025-11-21 14:21:40.087752716 +0000 UTC m=+1090.739951985" watchObservedRunningTime="2025-11-21 14:21:40.096171776 +0000 UTC m=+1090.748371035" Nov 21 14:21:40 crc kubenswrapper[4774]: I1121 14:21:40.113915 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m" podStartSLOduration=5.605353572 podStartE2EDuration="17.113892081s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.672617203 +0000 UTC m=+1076.324816462" lastFinishedPulling="2025-11-21 14:21:37.181155712 +0000 UTC m=+1087.833354971" observedRunningTime="2025-11-21 14:21:40.110149874 +0000 UTC m=+1090.762349133" watchObservedRunningTime="2025-11-21 14:21:40.113892081 +0000 UTC m=+1090.766091340" Nov 21 14:21:40 crc kubenswrapper[4774]: I1121 14:21:40.154938 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" podStartSLOduration=6.307906392 podStartE2EDuration="17.154899189s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:26.321399821 +0000 UTC m=+1076.973599090" lastFinishedPulling="2025-11-21 14:21:37.168392628 +0000 UTC m=+1087.820591887" observedRunningTime="2025-11-21 14:21:40.137398811 +0000 UTC m=+1090.789598080" watchObservedRunningTime="2025-11-21 14:21:40.154899189 +0000 UTC m=+1090.807098438" Nov 21 14:21:40 crc kubenswrapper[4774]: I1121 14:21:40.173476 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx" podStartSLOduration=5.671413395 podStartE2EDuration="17.173434158s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.665065228 +0000 UTC m=+1076.317264497" lastFinishedPulling="2025-11-21 14:21:37.167086001 +0000 UTC m=+1087.819285260" observedRunningTime="2025-11-21 14:21:40.164581605 +0000 UTC m=+1090.816780864" watchObservedRunningTime="2025-11-21 14:21:40.173434158 +0000 UTC m=+1090.825633417" Nov 21 14:21:42 crc kubenswrapper[4774]: I1121 14:21:42.008188 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7768f8c84f-jf5xt" Nov 21 14:21:42 crc kubenswrapper[4774]: I1121 14:21:42.012888 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-769d9c7585-7hqmf" Nov 21 14:21:42 crc kubenswrapper[4774]: I1121 14:21:42.015353 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-56dfb6b67f-qs9lx" Nov 21 14:21:42 crc kubenswrapper[4774]: I1121 
14:21:42.015597 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-5c75d7c94b-9j9tx" Nov 21 14:21:42 crc kubenswrapper[4774]: I1121 14:21:42.018395 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk" Nov 21 14:21:43 crc kubenswrapper[4774]: I1121 14:21:43.024550 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" event={"ID":"d60cbec8-ea75-476b-b4a2-9ff3272a11c1","Type":"ContainerStarted","Data":"72dcc1d6947630737c9800fd2cc2f4cdb49f691e762a607686f567973c38d97b"} Nov 21 14:21:43 crc kubenswrapper[4774]: I1121 14:21:43.025744 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" Nov 21 14:21:43 crc kubenswrapper[4774]: I1121 14:21:43.030766 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" event={"ID":"62889a75-0d04-4f7f-b03e-225eaee9ce86","Type":"ContainerStarted","Data":"bf9815fd69f9aa71bfda6731a358422745a00d3799d5fe4b1a89a95b524ced3f"} Nov 21 14:21:43 crc kubenswrapper[4774]: I1121 14:21:43.051964 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" podStartSLOduration=3.332177113 podStartE2EDuration="19.051934151s" podCreationTimestamp="2025-11-21 14:21:24 +0000 UTC" firstStartedPulling="2025-11-21 14:21:26.191010875 +0000 UTC m=+1076.843210134" lastFinishedPulling="2025-11-21 14:21:41.910767913 +0000 UTC m=+1092.562967172" observedRunningTime="2025-11-21 14:21:43.049315666 +0000 UTC m=+1093.701514935" watchObservedRunningTime="2025-11-21 14:21:43.051934151 +0000 UTC m=+1093.704133420" Nov 21 14:21:43 crc kubenswrapper[4774]: I1121 14:21:43.074259 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" podStartSLOduration=3.112171584 podStartE2EDuration="19.074225816s" podCreationTimestamp="2025-11-21 14:21:24 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.943463581 +0000 UTC m=+1076.595662840" lastFinishedPulling="2025-11-21 14:21:41.905517813 +0000 UTC m=+1092.557717072" observedRunningTime="2025-11-21 14:21:43.070778218 +0000 UTC m=+1093.722977477" watchObservedRunningTime="2025-11-21 14:21:43.074225816 +0000 UTC m=+1093.726425075" Nov 21 14:21:44 crc kubenswrapper[4774]: I1121 14:21:44.024911 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-6d8fd67bf7-vfpjr" Nov 21 14:21:44 crc kubenswrapper[4774]: I1121 14:21:44.109556 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-8667fbf6f6-mx2n9" Nov 21 14:21:44 crc kubenswrapper[4774]: I1121 14:21:44.191673 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-bf4c6585d-ctkn6" Nov 21 14:21:44 crc kubenswrapper[4774]: I1121 14:21:44.314448 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-5d86b44686-c8wfq" Nov 21 14:21:44 crc kubenswrapper[4774]: I1121 
14:21:44.526853 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7879fb76fd-7qkkw" Nov 21 14:21:44 crc kubenswrapper[4774]: I1121 14:21:44.576285 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7bb88cb858-7xd6m" Nov 21 14:21:44 crc kubenswrapper[4774]: I1121 14:21:44.720243 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-86d796d84d-87ktc" Nov 21 14:21:44 crc kubenswrapper[4774]: I1121 14:21:44.774873 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-6fdc856c5d-d5wzw" Nov 21 14:21:44 crc kubenswrapper[4774]: I1121 14:21:44.776727 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-66b7d6f598-x8k99" Nov 21 14:21:44 crc kubenswrapper[4774]: I1121 14:21:44.930004 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" Nov 21 14:21:44 crc kubenswrapper[4774]: I1121 14:21:44.995978 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-799cb6ffd6-gxhk7" Nov 21 14:21:45 crc kubenswrapper[4774]: I1121 14:21:45.051278 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" event={"ID":"e304c11a-e256-4c84-a317-b8b7eadd767a","Type":"ContainerStarted","Data":"ec07d8e704a38bcf5758c63497bf9f0472b91468d4b8a7e1090aa893ff18204c"} Nov 21 14:21:45 crc kubenswrapper[4774]: I1121 14:21:45.051527 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" Nov 21 14:21:45 crc kubenswrapper[4774]: I1121 14:21:45.060439 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" event={"ID":"1c1c222a-bb01-48b9-8115-1a4a35278047","Type":"ContainerStarted","Data":"f1c084e7e67e309ffbac23e0b6c15acd527537d3b5696de7ba23bf05570f0978"} Nov 21 14:21:45 crc kubenswrapper[4774]: I1121 14:21:45.060793 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" Nov 21 14:21:45 crc kubenswrapper[4774]: I1121 14:21:45.077006 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" podStartSLOduration=3.181187851 podStartE2EDuration="21.076965425s" podCreationTimestamp="2025-11-21 14:21:24 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.967902458 +0000 UTC m=+1076.620101717" lastFinishedPulling="2025-11-21 14:21:43.863680042 +0000 UTC m=+1094.515879291" observedRunningTime="2025-11-21 14:21:45.074479375 +0000 UTC m=+1095.726678634" watchObservedRunningTime="2025-11-21 14:21:45.076965425 +0000 UTC m=+1095.729164684" Nov 21 14:21:45 crc kubenswrapper[4774]: I1121 14:21:45.095948 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" podStartSLOduration=3.437395782 podStartE2EDuration="21.095915515s" podCreationTimestamp="2025-11-21 14:21:24 +0000 UTC" 
firstStartedPulling="2025-11-21 14:21:26.145021905 +0000 UTC m=+1076.797221164" lastFinishedPulling="2025-11-21 14:21:43.803541638 +0000 UTC m=+1094.455740897" observedRunningTime="2025-11-21 14:21:45.093183158 +0000 UTC m=+1095.745382427" watchObservedRunningTime="2025-11-21 14:21:45.095915515 +0000 UTC m=+1095.748114784" Nov 21 14:21:48 crc kubenswrapper[4774]: I1121 14:21:48.108550 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm" event={"ID":"2f2e330f-c352-4cd9-afd0-bf306e99fb39","Type":"ContainerStarted","Data":"3ae3785b7562266354515fafbd77175ccc1f5d81464193ee4c203b354735f89e"} Nov 21 14:21:48 crc kubenswrapper[4774]: I1121 14:21:48.109519 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm" Nov 21 14:21:48 crc kubenswrapper[4774]: I1121 14:21:48.110402 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" event={"ID":"3d0b08e2-b552-4a9a-a87d-bfae30f9045d","Type":"ContainerStarted","Data":"86e98bceebf95abd3121031a3ebdf6461cb42367fb54ed311aecff2d7de914ec"} Nov 21 14:21:48 crc kubenswrapper[4774]: I1121 14:21:48.110623 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" Nov 21 14:21:48 crc kubenswrapper[4774]: I1121 14:21:48.139086 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm" podStartSLOduration=3.609143331 podStartE2EDuration="25.139063701s" podCreationTimestamp="2025-11-21 14:21:23 +0000 UTC" firstStartedPulling="2025-11-21 14:21:25.809402631 +0000 UTC m=+1076.461601890" lastFinishedPulling="2025-11-21 14:21:47.339323001 +0000 UTC m=+1097.991522260" observedRunningTime="2025-11-21 14:21:48.13553833 +0000 UTC m=+1098.787737589" watchObservedRunningTime="2025-11-21 14:21:48.139063701 +0000 UTC m=+1098.791262960" Nov 21 14:21:48 crc kubenswrapper[4774]: I1121 14:21:48.158476 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" podStartSLOduration=2.991109174 podStartE2EDuration="24.158454623s" podCreationTimestamp="2025-11-21 14:21:24 +0000 UTC" firstStartedPulling="2025-11-21 14:21:26.191314544 +0000 UTC m=+1076.843513803" lastFinishedPulling="2025-11-21 14:21:47.358659993 +0000 UTC m=+1098.010859252" observedRunningTime="2025-11-21 14:21:48.153236074 +0000 UTC m=+1098.805435343" watchObservedRunningTime="2025-11-21 14:21:48.158454623 +0000 UTC m=+1098.810653892" Nov 21 14:21:54 crc kubenswrapper[4774]: I1121 14:21:54.692427 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-6f8c5b86cb-8qwvm" Nov 21 14:21:54 crc kubenswrapper[4774]: I1121 14:21:54.900951 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-5bdf4f7f7f-mz68p" Nov 21 14:21:54 crc kubenswrapper[4774]: I1121 14:21:54.935719 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-6dc664666c-z95rc" Nov 21 14:21:55 crc kubenswrapper[4774]: I1121 14:21:55.042053 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/telemetry-operator-controller-manager-7798859c74-ksx5x" Nov 21 14:21:55 crc kubenswrapper[4774]: I1121 14:21:55.052491 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-8464cf66df-8w5rz" Nov 21 14:21:55 crc kubenswrapper[4774]: I1121 14:21:55.117739 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-7cd4fb6f79-mtc8r" Nov 21 14:21:59 crc kubenswrapper[4774]: I1121 14:21:59.601286 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:21:59 crc kubenswrapper[4774]: I1121 14:21:59.601677 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.441308 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5d58585b49-d8r9m"] Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.443695 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.465331 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-shvcf" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.465994 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.466044 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.466219 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.482793 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d58585b49-d8r9m"] Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.530118 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77966f9df5-cblct"] Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.538899 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.541713 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.547901 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77966f9df5-cblct"] Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.571513 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16213164-33b0-4e84-a67e-0d87036d3248-config\") pod \"dnsmasq-dns-5d58585b49-d8r9m\" (UID: \"16213164-33b0-4e84-a67e-0d87036d3248\") " pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.571593 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scqxm\" (UniqueName: \"kubernetes.io/projected/16213164-33b0-4e84-a67e-0d87036d3248-kube-api-access-scqxm\") pod \"dnsmasq-dns-5d58585b49-d8r9m\" (UID: \"16213164-33b0-4e84-a67e-0d87036d3248\") " pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.673562 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scqxm\" (UniqueName: \"kubernetes.io/projected/16213164-33b0-4e84-a67e-0d87036d3248-kube-api-access-scqxm\") pod \"dnsmasq-dns-5d58585b49-d8r9m\" (UID: \"16213164-33b0-4e84-a67e-0d87036d3248\") " pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.673674 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cafb861e-f79e-4adb-9f1e-7c114fef4f57-config\") pod \"dnsmasq-dns-77966f9df5-cblct\" (UID: \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\") " pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.673768 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cafb861e-f79e-4adb-9f1e-7c114fef4f57-dns-svc\") pod \"dnsmasq-dns-77966f9df5-cblct\" (UID: \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\") " pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.674035 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16213164-33b0-4e84-a67e-0d87036d3248-config\") pod \"dnsmasq-dns-5d58585b49-d8r9m\" (UID: \"16213164-33b0-4e84-a67e-0d87036d3248\") " pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.675397 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg76t\" (UniqueName: \"kubernetes.io/projected/cafb861e-f79e-4adb-9f1e-7c114fef4f57-kube-api-access-kg76t\") pod \"dnsmasq-dns-77966f9df5-cblct\" (UID: \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\") " pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.675348 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16213164-33b0-4e84-a67e-0d87036d3248-config\") pod \"dnsmasq-dns-5d58585b49-d8r9m\" (UID: \"16213164-33b0-4e84-a67e-0d87036d3248\") " pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" 
Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.697603 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scqxm\" (UniqueName: \"kubernetes.io/projected/16213164-33b0-4e84-a67e-0d87036d3248-kube-api-access-scqxm\") pod \"dnsmasq-dns-5d58585b49-d8r9m\" (UID: \"16213164-33b0-4e84-a67e-0d87036d3248\") " pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.766563 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.777288 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cafb861e-f79e-4adb-9f1e-7c114fef4f57-config\") pod \"dnsmasq-dns-77966f9df5-cblct\" (UID: \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\") " pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.777420 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cafb861e-f79e-4adb-9f1e-7c114fef4f57-dns-svc\") pod \"dnsmasq-dns-77966f9df5-cblct\" (UID: \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\") " pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.778487 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cafb861e-f79e-4adb-9f1e-7c114fef4f57-dns-svc\") pod \"dnsmasq-dns-77966f9df5-cblct\" (UID: \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\") " pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.778556 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cafb861e-f79e-4adb-9f1e-7c114fef4f57-config\") pod \"dnsmasq-dns-77966f9df5-cblct\" (UID: \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\") " pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.778581 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg76t\" (UniqueName: \"kubernetes.io/projected/cafb861e-f79e-4adb-9f1e-7c114fef4f57-kube-api-access-kg76t\") pod \"dnsmasq-dns-77966f9df5-cblct\" (UID: \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\") " pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.802733 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg76t\" (UniqueName: \"kubernetes.io/projected/cafb861e-f79e-4adb-9f1e-7c114fef4f57-kube-api-access-kg76t\") pod \"dnsmasq-dns-77966f9df5-cblct\" (UID: \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\") " pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:10 crc kubenswrapper[4774]: I1121 14:22:10.866699 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:11 crc kubenswrapper[4774]: I1121 14:22:11.175501 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77966f9df5-cblct"] Nov 21 14:22:11 crc kubenswrapper[4774]: W1121 14:22:11.184670 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcafb861e_f79e_4adb_9f1e_7c114fef4f57.slice/crio-801c03d98762ae9285a4ce6e4d024d6b79329bd8fad2543730091cab8d0a4640 WatchSource:0}: Error finding container 801c03d98762ae9285a4ce6e4d024d6b79329bd8fad2543730091cab8d0a4640: Status 404 returned error can't find the container with id 801c03d98762ae9285a4ce6e4d024d6b79329bd8fad2543730091cab8d0a4640 Nov 21 14:22:11 crc kubenswrapper[4774]: I1121 14:22:11.189756 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 14:22:11 crc kubenswrapper[4774]: I1121 14:22:11.275322 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5d58585b49-d8r9m"] Nov 21 14:22:11 crc kubenswrapper[4774]: W1121 14:22:11.279887 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16213164_33b0_4e84_a67e_0d87036d3248.slice/crio-e99ef11f025eeb1e6104b1bba68d39af983c2dc1c91bae7080ea1f23483d74fb WatchSource:0}: Error finding container e99ef11f025eeb1e6104b1bba68d39af983c2dc1c91bae7080ea1f23483d74fb: Status 404 returned error can't find the container with id e99ef11f025eeb1e6104b1bba68d39af983c2dc1c91bae7080ea1f23483d74fb Nov 21 14:22:11 crc kubenswrapper[4774]: I1121 14:22:11.290637 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77966f9df5-cblct" event={"ID":"cafb861e-f79e-4adb-9f1e-7c114fef4f57","Type":"ContainerStarted","Data":"801c03d98762ae9285a4ce6e4d024d6b79329bd8fad2543730091cab8d0a4640"} Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.135532 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d58585b49-d8r9m"] Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.194189 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c84d8598c-rfwm6"] Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.195741 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.214490 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c84d8598c-rfwm6"] Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.298086 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" event={"ID":"16213164-33b0-4e84-a67e-0d87036d3248","Type":"ContainerStarted","Data":"e99ef11f025eeb1e6104b1bba68d39af983c2dc1c91bae7080ea1f23483d74fb"} Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.302261 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d45fbd36-f3c6-412b-915b-67e3f05dd69b-config\") pod \"dnsmasq-dns-7c84d8598c-rfwm6\" (UID: \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\") " pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.302347 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvh67\" (UniqueName: \"kubernetes.io/projected/d45fbd36-f3c6-412b-915b-67e3f05dd69b-kube-api-access-zvh67\") pod \"dnsmasq-dns-7c84d8598c-rfwm6\" (UID: \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\") " pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.302429 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d45fbd36-f3c6-412b-915b-67e3f05dd69b-dns-svc\") pod \"dnsmasq-dns-7c84d8598c-rfwm6\" (UID: \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\") " pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.403613 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvh67\" (UniqueName: \"kubernetes.io/projected/d45fbd36-f3c6-412b-915b-67e3f05dd69b-kube-api-access-zvh67\") pod \"dnsmasq-dns-7c84d8598c-rfwm6\" (UID: \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\") " pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.403724 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d45fbd36-f3c6-412b-915b-67e3f05dd69b-dns-svc\") pod \"dnsmasq-dns-7c84d8598c-rfwm6\" (UID: \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\") " pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.403763 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d45fbd36-f3c6-412b-915b-67e3f05dd69b-config\") pod \"dnsmasq-dns-7c84d8598c-rfwm6\" (UID: \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\") " pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.404907 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d45fbd36-f3c6-412b-915b-67e3f05dd69b-config\") pod \"dnsmasq-dns-7c84d8598c-rfwm6\" (UID: \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\") " pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.404903 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d45fbd36-f3c6-412b-915b-67e3f05dd69b-dns-svc\") pod \"dnsmasq-dns-7c84d8598c-rfwm6\" (UID: 
\"d45fbd36-f3c6-412b-915b-67e3f05dd69b\") " pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.427930 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvh67\" (UniqueName: \"kubernetes.io/projected/d45fbd36-f3c6-412b-915b-67e3f05dd69b-kube-api-access-zvh67\") pod \"dnsmasq-dns-7c84d8598c-rfwm6\" (UID: \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\") " pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.517956 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.924507 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77966f9df5-cblct"] Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.943011 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85965d46c9-5fjb8"] Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.944966 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:22:12 crc kubenswrapper[4774]: I1121 14:22:12.949130 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85965d46c9-5fjb8"] Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.015980 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-config\") pod \"dnsmasq-dns-85965d46c9-5fjb8\" (UID: \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\") " pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.016111 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-dns-svc\") pod \"dnsmasq-dns-85965d46c9-5fjb8\" (UID: \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\") " pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.016182 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldzhc\" (UniqueName: \"kubernetes.io/projected/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-kube-api-access-ldzhc\") pod \"dnsmasq-dns-85965d46c9-5fjb8\" (UID: \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\") " pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.055381 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c84d8598c-rfwm6"] Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.117850 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-dns-svc\") pod \"dnsmasq-dns-85965d46c9-5fjb8\" (UID: \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\") " pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.118009 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldzhc\" (UniqueName: \"kubernetes.io/projected/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-kube-api-access-ldzhc\") pod \"dnsmasq-dns-85965d46c9-5fjb8\" (UID: \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\") " pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.118119 
4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-config\") pod \"dnsmasq-dns-85965d46c9-5fjb8\" (UID: \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\") " pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.119055 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-dns-svc\") pod \"dnsmasq-dns-85965d46c9-5fjb8\" (UID: \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\") " pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.121639 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-config\") pod \"dnsmasq-dns-85965d46c9-5fjb8\" (UID: \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\") " pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.170685 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldzhc\" (UniqueName: \"kubernetes.io/projected/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-kube-api-access-ldzhc\") pod \"dnsmasq-dns-85965d46c9-5fjb8\" (UID: \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\") " pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.274948 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.317604 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" event={"ID":"d45fbd36-f3c6-412b-915b-67e3f05dd69b","Type":"ContainerStarted","Data":"84f23246570b23c77b094ed236fb44894ad1f2ec4ab714192fc9ad6c1bcaea53"} Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.386349 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.387766 4774 util.go:30] "No sandbox for pod can be found. 
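
Each dnsmasq pod above mounts two ConfigMap volumes, config and dns-svc, plus a projected kube-api-access volume, and the UniqueName the kubelet logs is the plugin name followed by the pod UID and volume name. The sketch below builds the corresponding volume stanzas with k8s.io/api types, assuming that module is available; the volume names, pod UID, and the dns-svc ConfigMap name come from the log, while the dnsmasq-dns ConfigMap name is an assumption.

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Volume stanzas as a controller might define them; the names "config"
	// and "dns-svc" match the log, the referenced ConfigMap "dnsmasq-dns" is assumed.
	vols := []corev1.Volume{
		{
			Name: "config",
			VolumeSource: corev1.VolumeSource{
				ConfigMap: &corev1.ConfigMapVolumeSource{
					LocalObjectReference: corev1.LocalObjectReference{Name: "dnsmasq-dns"},
				},
			},
		},
		{
			Name: "dns-svc",
			VolumeSource: corev1.VolumeSource{
				ConfigMap: &corev1.ConfigMapVolumeSource{
					LocalObjectReference: corev1.LocalObjectReference{Name: "dns-svc"},
				},
			},
		},
	}
	// The kubelet derives the log's UniqueName as <plugin>/<podUID>-<volume name>.
	podUID := "225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b"
	for _, v := range vols {
		fmt.Printf("kubernetes.io/configmap/%s-%s\n", podUID, v.Name)
	}
}
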
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.392686 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-m2xfw" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.392999 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.393152 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.393179 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.393323 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.393396 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.393488 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.406201 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.556152 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2685b76-2150-4209-a55b-a989ae40b7db-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.556686 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.556748 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.556789 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.556842 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.556868 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.556932 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z667l\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-kube-api-access-z667l\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.556966 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2685b76-2150-4209-a55b-a989ae40b7db-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.557015 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.557051 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.557372 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.659152 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z667l\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-kube-api-access-z667l\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.659212 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2685b76-2150-4209-a55b-a989ae40b7db-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.659235 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-server-conf\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.659267 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc 
kubenswrapper[4774]: I1121 14:22:13.659291 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.659339 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2685b76-2150-4209-a55b-a989ae40b7db-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.659389 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.659437 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.659469 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.659502 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.659534 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.660352 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.661681 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.662080 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-server-conf\") pod \"rabbitmq-server-0\" (UID: 
\"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.662093 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.662434 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.666137 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.667720 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.671285 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.675974 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2685b76-2150-4209-a55b-a989ae40b7db-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.683801 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2685b76-2150-4209-a55b-a989ae40b7db-pod-info\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.688748 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z667l\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-kube-api-access-z667l\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.695801 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.771621 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 21 14:22:13 crc kubenswrapper[4774]: I1121 14:22:13.877542 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85965d46c9-5fjb8"] Nov 21 14:22:13 crc kubenswrapper[4774]: W1121 14:22:13.901462 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod225e22d4_dd8a_4e49_a6a1_ebcf9b47c45b.slice/crio-13be3529ba0facdbc15f07a68e1a41f857252133f4e5b56ca4434edead5f5ced WatchSource:0}: Error finding container 13be3529ba0facdbc15f07a68e1a41f857252133f4e5b56ca4434edead5f5ced: Status 404 returned error can't find the container with id 13be3529ba0facdbc15f07a68e1a41f857252133f4e5b56ca4434edead5f5ced Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.054045 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.056114 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.060214 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-2d55d" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.061037 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.061131 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.061228 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.061577 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.063384 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.063724 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.076076 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.175225 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.175319 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.175339 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-erlang-cookie\") 
pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.175398 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.175434 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/64e33a39-c371-477f-b1c9-d58189db4bc8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.175451 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxzhf\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-kube-api-access-hxzhf\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.175733 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.175936 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.176079 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.176154 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/64e33a39-c371-477f-b1c9-d58189db4bc8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.176205 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.277657 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-plugins\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.277725 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.277768 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.277792 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/64e33a39-c371-477f-b1c9-d58189db4bc8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.277837 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.277881 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.277899 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.277919 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.277966 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.277988 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxzhf\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-kube-api-access-hxzhf\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc 
kubenswrapper[4774]: I1121 14:22:14.278005 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/64e33a39-c371-477f-b1c9-d58189db4bc8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.279263 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.280462 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.280753 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.280784 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.281331 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.283810 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.291334 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/64e33a39-c371-477f-b1c9-d58189db4bc8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.293220 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/64e33a39-c371-477f-b1c9-d58189db4bc8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.295885 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.319210 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.319835 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxzhf\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-kube-api-access-hxzhf\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.324033 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.336691 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" event={"ID":"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b","Type":"ContainerStarted","Data":"13be3529ba0facdbc15f07a68e1a41f857252133f4e5b56ca4434edead5f5ced"} Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.395013 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 21 14:22:14 crc kubenswrapper[4774]: W1121 14:22:14.416654 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2685b76_2150_4209_a55b_a989ae40b7db.slice/crio-3f1e87645ff873cb4e896241e1e200680f38a3108fb04275c3485c50a8840fc2 WatchSource:0}: Error finding container 3f1e87645ff873cb4e896241e1e200680f38a3108fb04275c3485c50a8840fc2: Status 404 returned error can't find the container with id 3f1e87645ff873cb4e896241e1e200680f38a3108fb04275c3485c50a8840fc2 Nov 21 14:22:14 crc kubenswrapper[4774]: I1121 14:22:14.426094 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.033994 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 21 14:22:15 crc kubenswrapper[4774]: W1121 14:22:15.073948 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64e33a39_c371_477f_b1c9_d58189db4bc8.slice/crio-dc6d1662a0c2b78373b991c1b1e92ae633ed34194c4c88028696266cdba3ae00 WatchSource:0}: Error finding container dc6d1662a0c2b78373b991c1b1e92ae633ed34194c4c88028696266cdba3ae00: Status 404 returned error can't find the container with id dc6d1662a0c2b78373b991c1b1e92ae633ed34194c4c88028696266cdba3ae00 Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.291160 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.324380 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.324588 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.329714 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.334451 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-4stkb" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.339003 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.339864 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.353580 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.374950 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2685b76-2150-4209-a55b-a989ae40b7db","Type":"ContainerStarted","Data":"3f1e87645ff873cb4e896241e1e200680f38a3108fb04275c3485c50a8840fc2"} Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.380406 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"64e33a39-c371-477f-b1c9-d58189db4bc8","Type":"ContainerStarted","Data":"dc6d1662a0c2b78373b991c1b1e92ae633ed34194c4c88028696266cdba3ae00"} Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.525065 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7030e5d8-2d2b-4cc5-a283-339599595a18-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.525193 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-config-data-default\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.525220 
4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/7030e5d8-2d2b-4cc5-a283-339599595a18-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.525245 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.525271 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7030e5d8-2d2b-4cc5-a283-339599595a18-config-data-generated\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.525293 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-kolla-config\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.525310 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-operator-scripts\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.525332 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2gl9\" (UniqueName: \"kubernetes.io/projected/7030e5d8-2d2b-4cc5-a283-339599595a18-kube-api-access-k2gl9\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.628158 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-config-data-default\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.628510 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/7030e5d8-2d2b-4cc5-a283-339599595a18-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.628535 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.628563 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/7030e5d8-2d2b-4cc5-a283-339599595a18-config-data-generated\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.628585 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-kolla-config\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.628602 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-operator-scripts\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.628624 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2gl9\" (UniqueName: \"kubernetes.io/projected/7030e5d8-2d2b-4cc5-a283-339599595a18-kube-api-access-k2gl9\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.628662 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7030e5d8-2d2b-4cc5-a283-339599595a18-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.629445 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-config-data-default\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.629721 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7030e5d8-2d2b-4cc5-a283-339599595a18-config-data-generated\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.631319 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.631375 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-operator-scripts\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.632860 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-kolla-config\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 
14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.651263 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7030e5d8-2d2b-4cc5-a283-339599595a18-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.659568 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/7030e5d8-2d2b-4cc5-a283-339599595a18-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.673529 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.674441 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2gl9\" (UniqueName: \"kubernetes.io/projected/7030e5d8-2d2b-4cc5-a283-339599595a18-kube-api-access-k2gl9\") pod \"openstack-galera-0\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " pod="openstack/openstack-galera-0" Nov 21 14:22:15 crc kubenswrapper[4774]: I1121 14:22:15.981053 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.666841 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Nov 21 14:22:16 crc kubenswrapper[4774]: W1121 14:22:16.677370 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7030e5d8_2d2b_4cc5_a283_339599595a18.slice/crio-3ff256fc9339b1b56af433c2e3b1578584343f46f5866e2f99bc3016dfa20d81 WatchSource:0}: Error finding container 3ff256fc9339b1b56af433c2e3b1578584343f46f5866e2f99bc3016dfa20d81: Status 404 returned error can't find the container with id 3ff256fc9339b1b56af433c2e3b1578584343f46f5866e2f99bc3016dfa20d81 Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.700383 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.703248 4774 util.go:30] "No sandbox for pod can be found. 
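
The reflector.go:368 "Caches populated" lines above and below are client-go reflectors filling the kubelet's per-namespace caches for the Secrets and ConfigMaps that pods reference. The sketch below shows the same list-then-watch pattern using client-go directly: a shared informer factory scoped to the openstack namespace whose ConfigMap cache is synced before use. The kubeconfig path is hypothetical; inside a cluster one would use rest.InClusterConfig instead.

package main

import (
	"fmt"
	"time"

	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/cache"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Hypothetical kubeconfig path for an out-of-cluster client.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)

	// Watch only the "openstack" namespace, analogous to the kubelet caching
	// the ConfigMaps/Secrets referenced by pods in that namespace.
	factory := informers.NewSharedInformerFactoryWithOptions(
		client, 30*time.Second, informers.WithNamespace("openstack"))
	cmInformer := factory.Core().V1().ConfigMaps().Informer()

	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)

	// Block until the initial list completes, i.e. the cache is populated.
	if cache.WaitForCacheSync(stop, cmInformer.HasSynced) {
		fmt.Println(`Caches populated for *v1.ConfigMap from namespace "openstack"`)
	}
}
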
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.708556 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.708998 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.710224 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-tkph9" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.710431 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.730990 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.851984 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.853509 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.853802 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b17b723-7e23-4a12-916e-0f2d00b72239-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.853887 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.853920 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.853987 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6zhg\" (UniqueName: \"kubernetes.io/projected/8b17b723-7e23-4a12-916e-0f2d00b72239-kube-api-access-j6zhg\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.854071 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8b17b723-7e23-4a12-916e-0f2d00b72239-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.854098 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8b17b723-7e23-4a12-916e-0f2d00b72239-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.957591 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b17b723-7e23-4a12-916e-0f2d00b72239-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.957674 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.957712 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.957748 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6zhg\" (UniqueName: \"kubernetes.io/projected/8b17b723-7e23-4a12-916e-0f2d00b72239-kube-api-access-j6zhg\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.957782 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b17b723-7e23-4a12-916e-0f2d00b72239-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.957798 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8b17b723-7e23-4a12-916e-0f2d00b72239-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.957864 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.957893 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.959052 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8b17b723-7e23-4a12-916e-0f2d00b72239-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.959211 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.962481 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.962534 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.965660 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.981748 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b17b723-7e23-4a12-916e-0f2d00b72239-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.982270 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b17b723-7e23-4a12-916e-0f2d00b72239-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:16 crc kubenswrapper[4774]: I1121 14:22:16.988998 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6zhg\" (UniqueName: \"kubernetes.io/projected/8b17b723-7e23-4a12-916e-0f2d00b72239-kube-api-access-j6zhg\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.000731 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.002516 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.007468 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-cd62f" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.008459 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.012251 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.014332 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.079919 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.160909 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-config-data\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.160979 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztnvx\" (UniqueName: \"kubernetes.io/projected/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-kube-api-access-ztnvx\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.161020 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-kolla-config\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.161062 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.161098 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.263266 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-config-data\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.263324 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztnvx\" (UniqueName: 
\"kubernetes.io/projected/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-kube-api-access-ztnvx\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.263357 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-kolla-config\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.263395 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.263427 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.264718 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-kolla-config\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.264924 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-config-data\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.280693 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.282111 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.282206 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztnvx\" (UniqueName: \"kubernetes.io/projected/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-kube-api-access-ztnvx\") pod \"memcached-0\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") " pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.363349 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.373473 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.407915 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7030e5d8-2d2b-4cc5-a283-339599595a18","Type":"ContainerStarted","Data":"3ff256fc9339b1b56af433c2e3b1578584343f46f5866e2f99bc3016dfa20d81"} Nov 21 14:22:17 crc kubenswrapper[4774]: I1121 14:22:17.995371 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 21 14:22:18 crc kubenswrapper[4774]: I1121 14:22:18.148113 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Nov 21 14:22:18 crc kubenswrapper[4774]: W1121 14:22:18.174493 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf38fad89_cd6f_47d4_82f9_a761f6a9ed9e.slice/crio-b91420377d8a19f9ecfad89bbe133f3c6f735de99c7abdd0a922e0fece1382a7 WatchSource:0}: Error finding container b91420377d8a19f9ecfad89bbe133f3c6f735de99c7abdd0a922e0fece1382a7: Status 404 returned error can't find the container with id b91420377d8a19f9ecfad89bbe133f3c6f735de99c7abdd0a922e0fece1382a7 Nov 21 14:22:18 crc kubenswrapper[4774]: I1121 14:22:18.463033 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8b17b723-7e23-4a12-916e-0f2d00b72239","Type":"ContainerStarted","Data":"96e5350069d68cc9fb8fa727f566111d417236174d4f0857a9d8162adf07265d"} Nov 21 14:22:18 crc kubenswrapper[4774]: I1121 14:22:18.467009 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e","Type":"ContainerStarted","Data":"b91420377d8a19f9ecfad89bbe133f3c6f735de99c7abdd0a922e0fece1382a7"} Nov 21 14:22:18 crc kubenswrapper[4774]: I1121 14:22:18.906355 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 21 14:22:18 crc kubenswrapper[4774]: I1121 14:22:18.907885 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 21 14:22:18 crc kubenswrapper[4774]: I1121 14:22:18.912375 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-kx4wr" Nov 21 14:22:18 crc kubenswrapper[4774]: I1121 14:22:18.933578 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 21 14:22:19 crc kubenswrapper[4774]: I1121 14:22:19.024515 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8kpv\" (UniqueName: \"kubernetes.io/projected/3e0a71b0-ad47-44f0-9c49-59a1430418b8-kube-api-access-c8kpv\") pod \"kube-state-metrics-0\" (UID: \"3e0a71b0-ad47-44f0-9c49-59a1430418b8\") " pod="openstack/kube-state-metrics-0" Nov 21 14:22:19 crc kubenswrapper[4774]: I1121 14:22:19.125906 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8kpv\" (UniqueName: \"kubernetes.io/projected/3e0a71b0-ad47-44f0-9c49-59a1430418b8-kube-api-access-c8kpv\") pod \"kube-state-metrics-0\" (UID: \"3e0a71b0-ad47-44f0-9c49-59a1430418b8\") " pod="openstack/kube-state-metrics-0" Nov 21 14:22:19 crc kubenswrapper[4774]: I1121 14:22:19.178268 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8kpv\" (UniqueName: \"kubernetes.io/projected/3e0a71b0-ad47-44f0-9c49-59a1430418b8-kube-api-access-c8kpv\") pod \"kube-state-metrics-0\" (UID: \"3e0a71b0-ad47-44f0-9c49-59a1430418b8\") " pod="openstack/kube-state-metrics-0" Nov 21 14:22:19 crc kubenswrapper[4774]: I1121 14:22:19.280018 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 21 14:22:19 crc kubenswrapper[4774]: I1121 14:22:19.810960 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 21 14:22:19 crc kubenswrapper[4774]: W1121 14:22:19.837119 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e0a71b0_ad47_44f0_9c49_59a1430418b8.slice/crio-450725972d1beec310bb5828f66c7775a4419d4033d9b6e79e1b97145960f1fe WatchSource:0}: Error finding container 450725972d1beec310bb5828f66c7775a4419d4033d9b6e79e1b97145960f1fe: Status 404 returned error can't find the container with id 450725972d1beec310bb5828f66c7775a4419d4033d9b6e79e1b97145960f1fe Nov 21 14:22:20 crc kubenswrapper[4774]: I1121 14:22:20.552950 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3e0a71b0-ad47-44f0-9c49-59a1430418b8","Type":"ContainerStarted","Data":"450725972d1beec310bb5828f66c7775a4419d4033d9b6e79e1b97145960f1fe"} Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.004522 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-2sxpw"] Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.006508 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.015307 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.015435 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-ffrh4" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.015710 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.032202 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-ld98r"] Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.034684 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.046361 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2sxpw"] Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.073100 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-ld98r"] Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096179 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gk9c\" (UniqueName: \"kubernetes.io/projected/7ee04f12-987f-4f31-81b3-10cd067af310-kube-api-access-5gk9c\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096235 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-etc-ovs\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096266 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-log-ovn\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096283 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-run\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096335 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-run\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096384 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ee04f12-987f-4f31-81b3-10cd067af310-combined-ca-bundle\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " 
pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096408 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ee04f12-987f-4f31-81b3-10cd067af310-scripts\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096431 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gf6vj\" (UniqueName: \"kubernetes.io/projected/124a9a6f-df08-4085-96d6-0a72f2bb2855-kube-api-access-gf6vj\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096647 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ee04f12-987f-4f31-81b3-10cd067af310-ovn-controller-tls-certs\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096720 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/124a9a6f-df08-4085-96d6-0a72f2bb2855-scripts\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096789 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-lib\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096844 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-log\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.096955 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-run-ovn\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.199447 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ee04f12-987f-4f31-81b3-10cd067af310-combined-ca-bundle\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.199507 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ee04f12-987f-4f31-81b3-10cd067af310-scripts\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc 
kubenswrapper[4774]: I1121 14:22:22.199535 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gf6vj\" (UniqueName: \"kubernetes.io/projected/124a9a6f-df08-4085-96d6-0a72f2bb2855-kube-api-access-gf6vj\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.199586 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ee04f12-987f-4f31-81b3-10cd067af310-ovn-controller-tls-certs\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.199603 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/124a9a6f-df08-4085-96d6-0a72f2bb2855-scripts\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.199628 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-lib\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.199647 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-log\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.199684 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-run-ovn\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.199717 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gk9c\" (UniqueName: \"kubernetes.io/projected/7ee04f12-987f-4f31-81b3-10cd067af310-kube-api-access-5gk9c\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.199777 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-etc-ovs\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.199799 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-log-ovn\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.199835 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-run\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.199857 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-run\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.200588 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-run\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.200721 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-lib\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.200863 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-log\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.201040 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-etc-ovs\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.201102 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-run\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.201139 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-log-ovn\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.201227 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-run-ovn\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.203136 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ee04f12-987f-4f31-81b3-10cd067af310-scripts\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.204811 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/124a9a6f-df08-4085-96d6-0a72f2bb2855-scripts\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.226241 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ee04f12-987f-4f31-81b3-10cd067af310-combined-ca-bundle\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.226716 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ee04f12-987f-4f31-81b3-10cd067af310-ovn-controller-tls-certs\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.236857 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gk9c\" (UniqueName: \"kubernetes.io/projected/7ee04f12-987f-4f31-81b3-10cd067af310-kube-api-access-5gk9c\") pod \"ovn-controller-2sxpw\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.263262 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gf6vj\" (UniqueName: \"kubernetes.io/projected/124a9a6f-df08-4085-96d6-0a72f2bb2855-kube-api-access-gf6vj\") pod \"ovn-controller-ovs-ld98r\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.351734 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:22 crc kubenswrapper[4774]: I1121 14:22:22.360454 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.797159 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.799773 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.802843 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.803077 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.803110 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.803501 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.806857 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-zdm7k" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.809917 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.862116 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.862172 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.862221 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0563658a-f1e8-4cae-b165-9697c4673895-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.862248 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.862296 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwffp\" (UniqueName: \"kubernetes.io/projected/0563658a-f1e8-4cae-b165-9697c4673895-kube-api-access-lwffp\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.862322 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.862352 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0563658a-f1e8-4cae-b165-9697c4673895-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.862381 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0563658a-f1e8-4cae-b165-9697c4673895-config\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.963995 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0563658a-f1e8-4cae-b165-9697c4673895-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.964065 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.964457 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwffp\" (UniqueName: \"kubernetes.io/projected/0563658a-f1e8-4cae-b165-9697c4673895-kube-api-access-lwffp\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.964754 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.964946 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0563658a-f1e8-4cae-b165-9697c4673895-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.964993 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0563658a-f1e8-4cae-b165-9697c4673895-config\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.965014 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.965049 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 
14:22:23.965437 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0563658a-f1e8-4cae-b165-9697c4673895-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.965608 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.965987 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0563658a-f1e8-4cae-b165-9697c4673895-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.966292 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0563658a-f1e8-4cae-b165-9697c4673895-config\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.969937 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.975638 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.987295 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:23 crc kubenswrapper[4774]: I1121 14:22:23.988032 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwffp\" (UniqueName: \"kubernetes.io/projected/0563658a-f1e8-4cae-b165-9697c4673895-kube-api-access-lwffp\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:24 crc kubenswrapper[4774]: I1121 14:22:24.012079 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:24 crc kubenswrapper[4774]: I1121 14:22:24.174484 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.485488 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.487410 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.491700 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-84lnp" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.491994 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.492201 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.492398 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.498970 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.602184 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/db7f3cb4-269e-443e-836e-caae1c2d122f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.602307 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.602361 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db7f3cb4-269e-443e-836e-caae1c2d122f-config\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.602408 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.602444 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.602478 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc 
kubenswrapper[4774]: I1121 14:22:25.602527 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wg79\" (UniqueName: \"kubernetes.io/projected/db7f3cb4-269e-443e-836e-caae1c2d122f-kube-api-access-8wg79\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.602554 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db7f3cb4-269e-443e-836e-caae1c2d122f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.704194 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.704688 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db7f3cb4-269e-443e-836e-caae1c2d122f-config\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.704745 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.705488 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.705943 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db7f3cb4-269e-443e-836e-caae1c2d122f-config\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.704777 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.706031 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.706083 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wg79\" (UniqueName: 
\"kubernetes.io/projected/db7f3cb4-269e-443e-836e-caae1c2d122f-kube-api-access-8wg79\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.706124 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db7f3cb4-269e-443e-836e-caae1c2d122f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.706160 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/db7f3cb4-269e-443e-836e-caae1c2d122f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.706626 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/db7f3cb4-269e-443e-836e-caae1c2d122f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.707677 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db7f3cb4-269e-443e-836e-caae1c2d122f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.721868 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.722507 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.722886 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.727324 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wg79\" (UniqueName: \"kubernetes.io/projected/db7f3cb4-269e-443e-836e-caae1c2d122f-kube-api-access-8wg79\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.751499 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:25 crc kubenswrapper[4774]: I1121 14:22:25.808584 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 21 14:22:29 crc kubenswrapper[4774]: I1121 14:22:29.605197 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:22:29 crc kubenswrapper[4774]: I1121 14:22:29.605642 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:22:49 crc kubenswrapper[4774]: E1121 14:22:49.496770 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:3db27fbfc3fabe2f62c68ab1b9f24383a73554f2d6d1f178147088832619013a" Nov 21 14:22:49 crc kubenswrapper[4774]: E1121 14:22:49.497724 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:3db27fbfc3fabe2f62c68ab1b9f24383a73554f2d6d1f178147088832619013a,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k2gl9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(7030e5d8-2d2b-4cc5-a283-339599595a18): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 
14:22:49 crc kubenswrapper[4774]: E1121 14:22:49.500941 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="7030e5d8-2d2b-4cc5-a283-339599595a18" Nov 21 14:22:49 crc kubenswrapper[4774]: E1121 14:22:49.820212 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:3db27fbfc3fabe2f62c68ab1b9f24383a73554f2d6d1f178147088832619013a\\\"\"" pod="openstack/openstack-galera-0" podUID="7030e5d8-2d2b-4cc5-a283-339599595a18" Nov 21 14:22:50 crc kubenswrapper[4774]: E1121 14:22:50.252756 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached@sha256:0b7651290cfea3b21b9232a1343849a0fbfe605e1aa85e22c5090a38139d6015" Nov 21 14:22:50 crc kubenswrapper[4774]: E1121 14:22:50.253273 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached@sha256:0b7651290cfea3b21b9232a1343849a0fbfe605e1aa85e22c5090a38139d6015,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n5f6h76hbbh89h688h5bfh77h579h68bh78h5fbh9bh5b8h5bh8bh5f7h56ch5ch564hb6h5b9hf4h588h5h58ch5f8h548h5f8h5h544h56fh549q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ztnvx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 
},Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(f38fad89-cd6f-47d4-82f9-a761f6a9ed9e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 14:22:50 crc kubenswrapper[4774]: E1121 14:22:50.254967 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" Nov 21 14:22:50 crc kubenswrapper[4774]: E1121 14:22:50.835514 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached@sha256:0b7651290cfea3b21b9232a1343849a0fbfe605e1aa85e22c5090a38139d6015\\\"\"" pod="openstack/memcached-0" podUID="f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.611465 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:8f8ade19bc904e0b06eaa2d55539cdebb1df40512845962a7aa672223332df90" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.611689 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:8f8ade19bc904e0b06eaa2d55539cdebb1df40512845962a7aa672223332df90,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hxzhf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(64e33a39-c371-477f-b1c9-d58189db4bc8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.613066 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="64e33a39-c371-477f-b1c9-d58189db4bc8" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.619782 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:8f8ade19bc904e0b06eaa2d55539cdebb1df40512845962a7aa672223332df90" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.620126 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:8f8ade19bc904e0b06eaa2d55539cdebb1df40512845962a7aa672223332df90,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m 
DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z667l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(e2685b76-2150-4209-a55b-a989ae40b7db): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.621564 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="e2685b76-2150-4209-a55b-a989ae40b7db" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.622357 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.622422 4774 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.622563 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb,Command:[],Args:[--resources=pods 
--namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-c8kpv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(3e0a71b0-ad47-44f0-9c49-59a1430418b8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled" logger="UnhandledError" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.623911 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="3e0a71b0-ad47-44f0-9c49-59a1430418b8" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.626293 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb@sha256:3db27fbfc3fabe2f62c68ab1b9f24383a73554f2d6d1f178147088832619013a" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.626426 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:3db27fbfc3fabe2f62c68ab1b9f24383a73554f2d6d1f178147088832619013a,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j6zhg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(8b17b723-7e23-4a12-916e-0f2d00b72239): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.628540 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="8b17b723-7e23-4a12-916e-0f2d00b72239" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.850158 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics@sha256:db384bf43222b066c378e77027a675d4cd9911107adba46c2922b3a55e10d6fb\\\"\"" pod="openstack/kube-state-metrics-0" podUID="3e0a71b0-ad47-44f0-9c49-59a1430418b8" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.850593 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:8f8ade19bc904e0b06eaa2d55539cdebb1df40512845962a7aa672223332df90\\\"\"" pod="openstack/rabbitmq-server-0" podUID="e2685b76-2150-4209-a55b-a989ae40b7db" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.852125 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: 
\"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq@sha256:8f8ade19bc904e0b06eaa2d55539cdebb1df40512845962a7aa672223332df90\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="64e33a39-c371-477f-b1c9-d58189db4bc8" Nov 21 14:22:51 crc kubenswrapper[4774]: E1121 14:22:51.852935 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb@sha256:3db27fbfc3fabe2f62c68ab1b9f24383a73554f2d6d1f178147088832619013a\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="8b17b723-7e23-4a12-916e-0f2d00b72239" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.497731 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:a0e6062f505fbc848d62675995abc3806bc5c12530d3d41ed16066e07f71b2d3" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.498362 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:a0e6062f505fbc848d62675995abc3806bc5c12530d3d41ed16066e07f71b2d3,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kg76t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-77966f9df5-cblct_openstack(cafb861e-f79e-4adb-9f1e-7c114fef4f57): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 
14:22:52.499722 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-77966f9df5-cblct" podUID="cafb861e-f79e-4adb-9f1e-7c114fef4f57" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.503108 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:a0e6062f505fbc848d62675995abc3806bc5c12530d3d41ed16066e07f71b2d3" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.503258 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:a0e6062f505fbc848d62675995abc3806bc5c12530d3d41ed16066e07f71b2d3,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zvh67,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-7c84d8598c-rfwm6_openstack(d45fbd36-f3c6-412b-915b-67e3f05dd69b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.504412 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" podUID="d45fbd36-f3c6-412b-915b-67e3f05dd69b" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.528047 4774 log.go:32] "PullImage from image 
service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:a0e6062f505fbc848d62675995abc3806bc5c12530d3d41ed16066e07f71b2d3" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.528218 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:a0e6062f505fbc848d62675995abc3806bc5c12530d3d41ed16066e07f71b2d3,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-scqxm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5d58585b49-d8r9m_openstack(16213164-33b0-4e84-a67e-0d87036d3248): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.529589 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" podUID="16213164-33b0-4e84-a67e-0d87036d3248" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.542201 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:a0e6062f505fbc848d62675995abc3806bc5c12530d3d41ed16066e07f71b2d3" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.542342 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:a0e6062f505fbc848d62675995abc3806bc5c12530d3d41ed16066e07f71b2d3,Command:[/bin/bash],Args:[-c dnsmasq 
--interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ldzhc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-85965d46c9-5fjb8_openstack(225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.543603 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" podUID="225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.858149 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:a0e6062f505fbc848d62675995abc3806bc5c12530d3d41ed16066e07f71b2d3\\\"\"" pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" podUID="225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b" Nov 21 14:22:52 crc kubenswrapper[4774]: E1121 14:22:52.858491 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server@sha256:a0e6062f505fbc848d62675995abc3806bc5c12530d3d41ed16066e07f71b2d3\\\"\"" pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" podUID="d45fbd36-f3c6-412b-915b-67e3f05dd69b" Nov 21 14:22:52 crc kubenswrapper[4774]: I1121 14:22:52.977154 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2sxpw"] Nov 21 14:22:53 
crc kubenswrapper[4774]: I1121 14:22:53.181870 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.270129 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-ld98r"] Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.299597 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.307787 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.495030 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cafb861e-f79e-4adb-9f1e-7c114fef4f57-dns-svc\") pod \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\" (UID: \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\") " Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.495240 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16213164-33b0-4e84-a67e-0d87036d3248-config\") pod \"16213164-33b0-4e84-a67e-0d87036d3248\" (UID: \"16213164-33b0-4e84-a67e-0d87036d3248\") " Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.495287 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cafb861e-f79e-4adb-9f1e-7c114fef4f57-config\") pod \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\" (UID: \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\") " Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.495399 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scqxm\" (UniqueName: \"kubernetes.io/projected/16213164-33b0-4e84-a67e-0d87036d3248-kube-api-access-scqxm\") pod \"16213164-33b0-4e84-a67e-0d87036d3248\" (UID: \"16213164-33b0-4e84-a67e-0d87036d3248\") " Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.496107 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cafb861e-f79e-4adb-9f1e-7c114fef4f57-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cafb861e-f79e-4adb-9f1e-7c114fef4f57" (UID: "cafb861e-f79e-4adb-9f1e-7c114fef4f57"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.496107 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16213164-33b0-4e84-a67e-0d87036d3248-config" (OuterVolumeSpecName: "config") pod "16213164-33b0-4e84-a67e-0d87036d3248" (UID: "16213164-33b0-4e84-a67e-0d87036d3248"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.496494 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cafb861e-f79e-4adb-9f1e-7c114fef4f57-config" (OuterVolumeSpecName: "config") pod "cafb861e-f79e-4adb-9f1e-7c114fef4f57" (UID: "cafb861e-f79e-4adb-9f1e-7c114fef4f57"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.497115 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg76t\" (UniqueName: \"kubernetes.io/projected/cafb861e-f79e-4adb-9f1e-7c114fef4f57-kube-api-access-kg76t\") pod \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\" (UID: \"cafb861e-f79e-4adb-9f1e-7c114fef4f57\") " Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.497750 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16213164-33b0-4e84-a67e-0d87036d3248-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.497780 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cafb861e-f79e-4adb-9f1e-7c114fef4f57-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.497794 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cafb861e-f79e-4adb-9f1e-7c114fef4f57-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.503402 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16213164-33b0-4e84-a67e-0d87036d3248-kube-api-access-scqxm" (OuterVolumeSpecName: "kube-api-access-scqxm") pod "16213164-33b0-4e84-a67e-0d87036d3248" (UID: "16213164-33b0-4e84-a67e-0d87036d3248"). InnerVolumeSpecName "kube-api-access-scqxm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.503654 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cafb861e-f79e-4adb-9f1e-7c114fef4f57-kube-api-access-kg76t" (OuterVolumeSpecName: "kube-api-access-kg76t") pod "cafb861e-f79e-4adb-9f1e-7c114fef4f57" (UID: "cafb861e-f79e-4adb-9f1e-7c114fef4f57"). InnerVolumeSpecName "kube-api-access-kg76t". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.600416 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scqxm\" (UniqueName: \"kubernetes.io/projected/16213164-33b0-4e84-a67e-0d87036d3248-kube-api-access-scqxm\") on node \"crc\" DevicePath \"\"" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.600476 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg76t\" (UniqueName: \"kubernetes.io/projected/cafb861e-f79e-4adb-9f1e-7c114fef4f57-kube-api-access-kg76t\") on node \"crc\" DevicePath \"\"" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.866508 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77966f9df5-cblct" event={"ID":"cafb861e-f79e-4adb-9f1e-7c114fef4f57","Type":"ContainerDied","Data":"801c03d98762ae9285a4ce6e4d024d6b79329bd8fad2543730091cab8d0a4640"} Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.866991 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77966f9df5-cblct" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.875799 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ld98r" event={"ID":"124a9a6f-df08-4085-96d6-0a72f2bb2855","Type":"ContainerStarted","Data":"cdbcbbaf9d43029e5d48c9f20c7b0821e73c5c3c6edc8665165b2502719db633"} Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.878313 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sxpw" event={"ID":"7ee04f12-987f-4f31-81b3-10cd067af310","Type":"ContainerStarted","Data":"2cf529dd42f1272162146cff715406bf4613ff943c312180b80c25fd82785d9b"} Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.880164 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"db7f3cb4-269e-443e-836e-caae1c2d122f","Type":"ContainerStarted","Data":"3c0e65da300bee1fbf43896f5ef986ca282acfedd88e9aeaf9dd8a6a98064629"} Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.887943 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" event={"ID":"16213164-33b0-4e84-a67e-0d87036d3248","Type":"ContainerDied","Data":"e99ef11f025eeb1e6104b1bba68d39af983c2dc1c91bae7080ea1f23483d74fb"} Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.888013 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5d58585b49-d8r9m" Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.983879 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77966f9df5-cblct"] Nov 21 14:22:53 crc kubenswrapper[4774]: I1121 14:22:53.991459 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77966f9df5-cblct"] Nov 21 14:22:54 crc kubenswrapper[4774]: I1121 14:22:54.036328 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5d58585b49-d8r9m"] Nov 21 14:22:54 crc kubenswrapper[4774]: I1121 14:22:54.045865 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5d58585b49-d8r9m"] Nov 21 14:22:54 crc kubenswrapper[4774]: I1121 14:22:54.103722 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16213164-33b0-4e84-a67e-0d87036d3248" path="/var/lib/kubelet/pods/16213164-33b0-4e84-a67e-0d87036d3248/volumes" Nov 21 14:22:54 crc kubenswrapper[4774]: I1121 14:22:54.104205 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cafb861e-f79e-4adb-9f1e-7c114fef4f57" path="/var/lib/kubelet/pods/cafb861e-f79e-4adb-9f1e-7c114fef4f57/volumes" Nov 21 14:22:54 crc kubenswrapper[4774]: I1121 14:22:54.269993 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 21 14:22:54 crc kubenswrapper[4774]: W1121 14:22:54.287586 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0563658a_f1e8_4cae_b165_9697c4673895.slice/crio-34351f6a1d2eb3c687fdd7ca72f122de3499b79351c524d83e90ebcec1c1143a WatchSource:0}: Error finding container 34351f6a1d2eb3c687fdd7ca72f122de3499b79351c524d83e90ebcec1c1143a: Status 404 returned error can't find the container with id 34351f6a1d2eb3c687fdd7ca72f122de3499b79351c524d83e90ebcec1c1143a Nov 21 14:22:54 crc kubenswrapper[4774]: I1121 14:22:54.897171 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" 
event={"ID":"0563658a-f1e8-4cae-b165-9697c4673895","Type":"ContainerStarted","Data":"34351f6a1d2eb3c687fdd7ca72f122de3499b79351c524d83e90ebcec1c1143a"} Nov 21 14:22:57 crc kubenswrapper[4774]: I1121 14:22:57.927626 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sxpw" event={"ID":"7ee04f12-987f-4f31-81b3-10cd067af310","Type":"ContainerStarted","Data":"5835b1e71040c97609879beff0fc752dee4bbaaacdb26af845a02a7a42242f5c"} Nov 21 14:22:57 crc kubenswrapper[4774]: I1121 14:22:57.930969 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-2sxpw" Nov 21 14:22:57 crc kubenswrapper[4774]: I1121 14:22:57.935097 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"db7f3cb4-269e-443e-836e-caae1c2d122f","Type":"ContainerStarted","Data":"ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2"} Nov 21 14:22:57 crc kubenswrapper[4774]: I1121 14:22:57.938535 4774 generic.go:334] "Generic (PLEG): container finished" podID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerID="bf7cb74b44be89be201e2ce67eb220507aa9f8df6a5d02065d5434bb1bd923f3" exitCode=0 Nov 21 14:22:57 crc kubenswrapper[4774]: I1121 14:22:57.938765 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ld98r" event={"ID":"124a9a6f-df08-4085-96d6-0a72f2bb2855","Type":"ContainerDied","Data":"bf7cb74b44be89be201e2ce67eb220507aa9f8df6a5d02065d5434bb1bd923f3"} Nov 21 14:22:57 crc kubenswrapper[4774]: I1121 14:22:57.942398 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0563658a-f1e8-4cae-b165-9697c4673895","Type":"ContainerStarted","Data":"6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1"} Nov 21 14:22:57 crc kubenswrapper[4774]: I1121 14:22:57.955350 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-2sxpw" podStartSLOduration=33.238906567 podStartE2EDuration="36.955326227s" podCreationTimestamp="2025-11-21 14:22:21 +0000 UTC" firstStartedPulling="2025-11-21 14:22:53.028289606 +0000 UTC m=+1163.680488865" lastFinishedPulling="2025-11-21 14:22:56.744709266 +0000 UTC m=+1167.396908525" observedRunningTime="2025-11-21 14:22:57.947885573 +0000 UTC m=+1168.600084842" watchObservedRunningTime="2025-11-21 14:22:57.955326227 +0000 UTC m=+1168.607525486" Nov 21 14:22:58 crc kubenswrapper[4774]: I1121 14:22:58.958654 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ld98r" event={"ID":"124a9a6f-df08-4085-96d6-0a72f2bb2855","Type":"ContainerStarted","Data":"08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3"} Nov 21 14:22:59 crc kubenswrapper[4774]: I1121 14:22:59.601360 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:22:59 crc kubenswrapper[4774]: I1121 14:22:59.601864 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:22:59 crc kubenswrapper[4774]: I1121 14:22:59.601923 4774 
kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:22:59 crc kubenswrapper[4774]: I1121 14:22:59.602923 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3354716a800c28fc56d313636d4868697077dddaabfb1fc36da33f6ee413381b"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 14:22:59 crc kubenswrapper[4774]: I1121 14:22:59.602988 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://3354716a800c28fc56d313636d4868697077dddaabfb1fc36da33f6ee413381b" gracePeriod=600 Nov 21 14:22:59 crc kubenswrapper[4774]: I1121 14:22:59.971275 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="3354716a800c28fc56d313636d4868697077dddaabfb1fc36da33f6ee413381b" exitCode=0 Nov 21 14:22:59 crc kubenswrapper[4774]: I1121 14:22:59.971348 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"3354716a800c28fc56d313636d4868697077dddaabfb1fc36da33f6ee413381b"} Nov 21 14:22:59 crc kubenswrapper[4774]: I1121 14:22:59.971400 4774 scope.go:117] "RemoveContainer" containerID="60358f57ea897b7d0cc072aaadbd84c8627ffc28289a543329a6b20ec347a65d" Nov 21 14:23:01 crc kubenswrapper[4774]: I1121 14:23:01.004335 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"50f0abd54c499ac14c722ce78a1be249e3c65fdd8bde5f56a8b5c580514c52ff"} Nov 21 14:23:01 crc kubenswrapper[4774]: I1121 14:23:01.007751 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"db7f3cb4-269e-443e-836e-caae1c2d122f","Type":"ContainerStarted","Data":"f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79"} Nov 21 14:23:01 crc kubenswrapper[4774]: I1121 14:23:01.014627 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0563658a-f1e8-4cae-b165-9697c4673895","Type":"ContainerStarted","Data":"2c34ab166ca72aebd1fd6aa1a5cc31cb68ddf856a803c3f00aa0f1b318e937ff"} Nov 21 14:23:01 crc kubenswrapper[4774]: I1121 14:23:01.049463 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=29.644536331 podStartE2EDuration="37.049409464s" podCreationTimestamp="2025-11-21 14:22:24 +0000 UTC" firstStartedPulling="2025-11-21 14:22:53.224317761 +0000 UTC m=+1163.876517020" lastFinishedPulling="2025-11-21 14:23:00.629190894 +0000 UTC m=+1171.281390153" observedRunningTime="2025-11-21 14:23:01.044829442 +0000 UTC m=+1171.697028721" watchObservedRunningTime="2025-11-21 14:23:01.049409464 +0000 UTC m=+1171.701608723" Nov 21 14:23:01 crc kubenswrapper[4774]: I1121 14:23:01.079227 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=32.718537957 podStartE2EDuration="39.079198939s" 
podCreationTimestamp="2025-11-21 14:22:22 +0000 UTC" firstStartedPulling="2025-11-21 14:22:54.296396951 +0000 UTC m=+1164.948596210" lastFinishedPulling="2025-11-21 14:23:00.657057933 +0000 UTC m=+1171.309257192" observedRunningTime="2025-11-21 14:23:01.074199925 +0000 UTC m=+1171.726399194" watchObservedRunningTime="2025-11-21 14:23:01.079198939 +0000 UTC m=+1171.731398198" Nov 21 14:23:01 crc kubenswrapper[4774]: I1121 14:23:01.809206 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Nov 21 14:23:01 crc kubenswrapper[4774]: I1121 14:23:01.860254 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.026179 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ld98r" event={"ID":"124a9a6f-df08-4085-96d6-0a72f2bb2855","Type":"ContainerStarted","Data":"b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd"} Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.027013 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.060355 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-ld98r" podStartSLOduration=37.613835326 podStartE2EDuration="41.060325276s" podCreationTimestamp="2025-11-21 14:22:21 +0000 UTC" firstStartedPulling="2025-11-21 14:22:53.296649622 +0000 UTC m=+1163.948848871" lastFinishedPulling="2025-11-21 14:22:56.743139562 +0000 UTC m=+1167.395338821" observedRunningTime="2025-11-21 14:23:02.055318232 +0000 UTC m=+1172.707517501" watchObservedRunningTime="2025-11-21 14:23:02.060325276 +0000 UTC m=+1172.712524535" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.073789 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.298501 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c84d8598c-rfwm6"] Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.327730 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-sdw4x"] Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.330478 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.339883 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.361017 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.361084 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.372350 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77dd6f7c47-bwnjg"] Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.374462 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.384968 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.398233 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-sdw4x"] Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.429784 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77dd6f7c47-bwnjg"] Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.501177 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qmts\" (UniqueName: \"kubernetes.io/projected/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-kube-api-access-7qmts\") pod \"dnsmasq-dns-77dd6f7c47-bwnjg\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.501257 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-config\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.501300 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-ovs-rundir\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.501606 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-ovsdbserver-nb\") pod \"dnsmasq-dns-77dd6f7c47-bwnjg\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.501693 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klnwd\" (UniqueName: \"kubernetes.io/projected/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-kube-api-access-klnwd\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.502107 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-ovn-rundir\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.502236 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-config\") pod \"dnsmasq-dns-77dd6f7c47-bwnjg\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.502265 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.502302 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-dns-svc\") pod \"dnsmasq-dns-77dd6f7c47-bwnjg\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.502372 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-combined-ca-bundle\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.605446 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-ovn-rundir\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.605504 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-config\") pod \"dnsmasq-dns-77dd6f7c47-bwnjg\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.605527 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.605553 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-dns-svc\") pod \"dnsmasq-dns-77dd6f7c47-bwnjg\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.605589 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-combined-ca-bundle\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.606108 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qmts\" (UniqueName: \"kubernetes.io/projected/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-kube-api-access-7qmts\") pod \"dnsmasq-dns-77dd6f7c47-bwnjg\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.606165 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-config\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.606193 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-ovs-rundir\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.606234 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-ovsdbserver-nb\") pod \"dnsmasq-dns-77dd6f7c47-bwnjg\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.606274 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klnwd\" (UniqueName: \"kubernetes.io/projected/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-kube-api-access-klnwd\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.608645 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-ovs-rundir\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.608858 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-ovn-rundir\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.608862 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-dns-svc\") pod \"dnsmasq-dns-77dd6f7c47-bwnjg\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.609961 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-config\") pod \"dnsmasq-dns-77dd6f7c47-bwnjg\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.610062 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-ovsdbserver-nb\") pod \"dnsmasq-dns-77dd6f7c47-bwnjg\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.610400 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-config\") pod \"ovn-controller-metrics-sdw4x\" (UID: 
\"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.630611 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.636439 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-combined-ca-bundle\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.651602 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qmts\" (UniqueName: \"kubernetes.io/projected/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-kube-api-access-7qmts\") pod \"dnsmasq-dns-77dd6f7c47-bwnjg\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.653426 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klnwd\" (UniqueName: \"kubernetes.io/projected/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-kube-api-access-klnwd\") pod \"ovn-controller-metrics-sdw4x\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.662654 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.719347 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.875415 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85965d46c9-5fjb8"] Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.971318 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.974783 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78bc665c87-n2whg"] Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.976960 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.980933 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Nov 21 14:23:02 crc kubenswrapper[4774]: I1121 14:23:02.985767 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78bc665c87-n2whg"] Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.028353 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d45fbd36-f3c6-412b-915b-67e3f05dd69b-dns-svc\") pod \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\" (UID: \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\") " Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.028411 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d45fbd36-f3c6-412b-915b-67e3f05dd69b-config\") pod \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\" (UID: \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\") " Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.028544 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvh67\" (UniqueName: \"kubernetes.io/projected/d45fbd36-f3c6-412b-915b-67e3f05dd69b-kube-api-access-zvh67\") pod \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\" (UID: \"d45fbd36-f3c6-412b-915b-67e3f05dd69b\") " Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.028692 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-config\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.028731 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-ovsdbserver-sb\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.028838 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpxs4\" (UniqueName: \"kubernetes.io/projected/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-kube-api-access-dpxs4\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.028874 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-dns-svc\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.028924 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-ovsdbserver-nb\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.029691 4774 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d45fbd36-f3c6-412b-915b-67e3f05dd69b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d45fbd36-f3c6-412b-915b-67e3f05dd69b" (UID: "d45fbd36-f3c6-412b-915b-67e3f05dd69b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.029897 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d45fbd36-f3c6-412b-915b-67e3f05dd69b-config" (OuterVolumeSpecName: "config") pod "d45fbd36-f3c6-412b-915b-67e3f05dd69b" (UID: "d45fbd36-f3c6-412b-915b-67e3f05dd69b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.034029 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d45fbd36-f3c6-412b-915b-67e3f05dd69b-kube-api-access-zvh67" (OuterVolumeSpecName: "kube-api-access-zvh67") pod "d45fbd36-f3c6-412b-915b-67e3f05dd69b" (UID: "d45fbd36-f3c6-412b-915b-67e3f05dd69b"). InnerVolumeSpecName "kube-api-access-zvh67". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.100560 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" event={"ID":"d45fbd36-f3c6-412b-915b-67e3f05dd69b","Type":"ContainerDied","Data":"84f23246570b23c77b094ed236fb44894ad1f2ec4ab714192fc9ad6c1bcaea53"} Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.100605 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c84d8598c-rfwm6" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.135156 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpxs4\" (UniqueName: \"kubernetes.io/projected/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-kube-api-access-dpxs4\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.135587 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-dns-svc\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.136832 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-ovsdbserver-nb\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.137424 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-config\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.137522 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-ovsdbserver-sb\") pod 
\"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.138293 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-ovsdbserver-nb\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.139208 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-dns-svc\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.140182 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-config\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.142099 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d45fbd36-f3c6-412b-915b-67e3f05dd69b-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.142150 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvh67\" (UniqueName: \"kubernetes.io/projected/d45fbd36-f3c6-412b-915b-67e3f05dd69b-kube-api-access-zvh67\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.142198 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d45fbd36-f3c6-412b-915b-67e3f05dd69b-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.142104 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-ovsdbserver-sb\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.175743 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.190478 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpxs4\" (UniqueName: \"kubernetes.io/projected/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-kube-api-access-dpxs4\") pod \"dnsmasq-dns-78bc665c87-n2whg\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.240062 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.330330 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.336033 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c84d8598c-rfwm6"] Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.347743 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c84d8598c-rfwm6"] Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.376910 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.448227 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-config\") pod \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\" (UID: \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\") " Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.448354 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-dns-svc\") pod \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\" (UID: \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\") " Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.448442 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldzhc\" (UniqueName: \"kubernetes.io/projected/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-kube-api-access-ldzhc\") pod \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\" (UID: \"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b\") " Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.450572 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b" (UID: "225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.450777 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-config" (OuterVolumeSpecName: "config") pod "225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b" (UID: "225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.458049 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-kube-api-access-ldzhc" (OuterVolumeSpecName: "kube-api-access-ldzhc") pod "225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b" (UID: "225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b"). InnerVolumeSpecName "kube-api-access-ldzhc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.551199 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.551686 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldzhc\" (UniqueName: \"kubernetes.io/projected/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-kube-api-access-ldzhc\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.551698 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.616776 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-sdw4x"] Nov 21 14:23:03 crc kubenswrapper[4774]: W1121 14:23:03.622862 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4549a9b5_fb19_4dae_9fee_b03d5d49e95d.slice/crio-eb45778d8c0a689a4260538abba14618f2fb4a419618f25ac6084c13abfb6151 WatchSource:0}: Error finding container eb45778d8c0a689a4260538abba14618f2fb4a419618f25ac6084c13abfb6151: Status 404 returned error can't find the container with id eb45778d8c0a689a4260538abba14618f2fb4a419618f25ac6084c13abfb6151 Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.756930 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77dd6f7c47-bwnjg"] Nov 21 14:23:03 crc kubenswrapper[4774]: I1121 14:23:03.866529 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78bc665c87-n2whg"] Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.107416 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d45fbd36-f3c6-412b-915b-67e3f05dd69b" path="/var/lib/kubelet/pods/d45fbd36-f3c6-412b-915b-67e3f05dd69b/volumes" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.114382 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e","Type":"ContainerStarted","Data":"69c2ce68633246110d46b63e32f22397b08e5e0b28d2e21b8332046a0b226d6e"} Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.114710 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.116227 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-sdw4x" event={"ID":"4549a9b5-fb19-4dae-9fee-b03d5d49e95d","Type":"ContainerStarted","Data":"eb45778d8c0a689a4260538abba14618f2fb4a419618f25ac6084c13abfb6151"} Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.117224 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" event={"ID":"225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b","Type":"ContainerDied","Data":"13be3529ba0facdbc15f07a68e1a41f857252133f4e5b56ca4434edead5f5ced"} Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.117304 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85965d46c9-5fjb8" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.118170 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.152264 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=3.383076789 podStartE2EDuration="48.152234909s" podCreationTimestamp="2025-11-21 14:22:16 +0000 UTC" firstStartedPulling="2025-11-21 14:22:18.19140135 +0000 UTC m=+1128.843600609" lastFinishedPulling="2025-11-21 14:23:02.96055947 +0000 UTC m=+1173.612758729" observedRunningTime="2025-11-21 14:23:04.142413837 +0000 UTC m=+1174.794613096" watchObservedRunningTime="2025-11-21 14:23:04.152234909 +0000 UTC m=+1174.804434168" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.169013 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.280117 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85965d46c9-5fjb8"] Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.286412 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85965d46c9-5fjb8"] Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.404147 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.406700 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.410999 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-p95wg" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.411085 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.411315 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.411531 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.429959 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.569613 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.569713 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4c16af5b-77af-4097-ad41-42aaa0aac4a1-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.569766 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: 
\"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.569793 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c16af5b-77af-4097-ad41-42aaa0aac4a1-config\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.569809 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.569963 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wwb2\" (UniqueName: \"kubernetes.io/projected/4c16af5b-77af-4097-ad41-42aaa0aac4a1-kube-api-access-8wwb2\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.570014 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c16af5b-77af-4097-ad41-42aaa0aac4a1-scripts\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.673173 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4c16af5b-77af-4097-ad41-42aaa0aac4a1-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.673285 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.673323 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c16af5b-77af-4097-ad41-42aaa0aac4a1-config\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.673346 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.673371 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wwb2\" (UniqueName: \"kubernetes.io/projected/4c16af5b-77af-4097-ad41-42aaa0aac4a1-kube-api-access-8wwb2\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.673390 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/4c16af5b-77af-4097-ad41-42aaa0aac4a1-scripts\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.673450 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.673714 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4c16af5b-77af-4097-ad41-42aaa0aac4a1-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.674781 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c16af5b-77af-4097-ad41-42aaa0aac4a1-scripts\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.674798 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c16af5b-77af-4097-ad41-42aaa0aac4a1-config\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.707637 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.707730 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.709866 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.712697 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wwb2\" (UniqueName: \"kubernetes.io/projected/4c16af5b-77af-4097-ad41-42aaa0aac4a1-kube-api-access-8wwb2\") pod \"ovn-northd-0\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: I1121 14:23:04.729308 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Nov 21 14:23:04 crc kubenswrapper[4774]: W1121 14:23:04.825196 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeca93d73_f946_48b2_8a7d_93f3ff9fdf90.slice/crio-15bd10890bd281b6668cc8b962a541f90b488cd9882e3a72fd0a8a49299dc5e7 WatchSource:0}: Error finding container 15bd10890bd281b6668cc8b962a541f90b488cd9882e3a72fd0a8a49299dc5e7: Status 404 returned error can't find the container with id 15bd10890bd281b6668cc8b962a541f90b488cd9882e3a72fd0a8a49299dc5e7 Nov 21 14:23:04 crc kubenswrapper[4774]: W1121 14:23:04.826751 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef0c3332_e2c0_4e02_932d_ce49c5946ce4.slice/crio-93493d453f399ed4899ee79fcd85c60bbb6e39b3195e09d5c6c8149e03855128 WatchSource:0}: Error finding container 93493d453f399ed4899ee79fcd85c60bbb6e39b3195e09d5c6c8149e03855128: Status 404 returned error can't find the container with id 93493d453f399ed4899ee79fcd85c60bbb6e39b3195e09d5c6c8149e03855128 Nov 21 14:23:05 crc kubenswrapper[4774]: I1121 14:23:05.131721 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" event={"ID":"ef0c3332-e2c0-4e02-932d-ce49c5946ce4","Type":"ContainerStarted","Data":"93493d453f399ed4899ee79fcd85c60bbb6e39b3195e09d5c6c8149e03855128"} Nov 21 14:23:05 crc kubenswrapper[4774]: I1121 14:23:05.137685 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" event={"ID":"eca93d73-f946-48b2-8a7d-93f3ff9fdf90","Type":"ContainerStarted","Data":"15bd10890bd281b6668cc8b962a541f90b488cd9882e3a72fd0a8a49299dc5e7"} Nov 21 14:23:05 crc kubenswrapper[4774]: I1121 14:23:05.160045 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Nov 21 14:23:05 crc kubenswrapper[4774]: W1121 14:23:05.174162 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c16af5b_77af_4097_ad41_42aaa0aac4a1.slice/crio-fbbc56959cbb2a4349d71256067a2120bf33ba3f0f2153382d4014df261fd440 WatchSource:0}: Error finding container fbbc56959cbb2a4349d71256067a2120bf33ba3f0f2153382d4014df261fd440: Status 404 returned error can't find the container with id fbbc56959cbb2a4349d71256067a2120bf33ba3f0f2153382d4014df261fd440 Nov 21 14:23:06 crc kubenswrapper[4774]: I1121 14:23:06.105461 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b" path="/var/lib/kubelet/pods/225e22d4-dd8a-4e49-a6a1-ebcf9b47c45b/volumes" Nov 21 14:23:06 crc kubenswrapper[4774]: I1121 14:23:06.146627 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2685b76-2150-4209-a55b-a989ae40b7db","Type":"ContainerStarted","Data":"d28eebf85b23a893614d02f00de474df7cb0032d8a129eb8f057b60aeb7a3b5d"} Nov 21 14:23:06 crc kubenswrapper[4774]: I1121 14:23:06.148319 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"64e33a39-c371-477f-b1c9-d58189db4bc8","Type":"ContainerStarted","Data":"c8583eef8a391a28ea2dc5e764d94e0aa5490a82e94adc85f543fff3c67bdb93"} Nov 21 14:23:06 crc kubenswrapper[4774]: I1121 14:23:06.150545 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-sdw4x" 
event={"ID":"4549a9b5-fb19-4dae-9fee-b03d5d49e95d","Type":"ContainerStarted","Data":"ae41437905bab4b8a42a6e934b47544bc731aad356664fbc208508fb4483c6af"} Nov 21 14:23:06 crc kubenswrapper[4774]: I1121 14:23:06.152459 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4c16af5b-77af-4097-ad41-42aaa0aac4a1","Type":"ContainerStarted","Data":"fbbc56959cbb2a4349d71256067a2120bf33ba3f0f2153382d4014df261fd440"} Nov 21 14:23:06 crc kubenswrapper[4774]: I1121 14:23:06.230674 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-sdw4x" podStartSLOduration=4.230652738 podStartE2EDuration="4.230652738s" podCreationTimestamp="2025-11-21 14:23:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:23:06.222358499 +0000 UTC m=+1176.874557768" watchObservedRunningTime="2025-11-21 14:23:06.230652738 +0000 UTC m=+1176.882851997" Nov 21 14:23:10 crc kubenswrapper[4774]: I1121 14:23:10.207514 4774 generic.go:334] "Generic (PLEG): container finished" podID="ef0c3332-e2c0-4e02-932d-ce49c5946ce4" containerID="b76020aca53ac9257235cc5beb94b75d8d743299edd5cebaff629a3e207058b5" exitCode=0 Nov 21 14:23:10 crc kubenswrapper[4774]: I1121 14:23:10.207577 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" event={"ID":"ef0c3332-e2c0-4e02-932d-ce49c5946ce4","Type":"ContainerDied","Data":"b76020aca53ac9257235cc5beb94b75d8d743299edd5cebaff629a3e207058b5"} Nov 21 14:23:10 crc kubenswrapper[4774]: I1121 14:23:10.211858 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4c16af5b-77af-4097-ad41-42aaa0aac4a1","Type":"ContainerStarted","Data":"d1073de69bf390fe30269d7b088a8b3fbfa034bdc8ef77499fb5ba4f9878eef7"} Nov 21 14:23:10 crc kubenswrapper[4774]: I1121 14:23:10.211927 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4c16af5b-77af-4097-ad41-42aaa0aac4a1","Type":"ContainerStarted","Data":"382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9"} Nov 21 14:23:10 crc kubenswrapper[4774]: I1121 14:23:10.212079 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Nov 21 14:23:10 crc kubenswrapper[4774]: I1121 14:23:10.214074 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8b17b723-7e23-4a12-916e-0f2d00b72239","Type":"ContainerStarted","Data":"3cf0fc6137a3bf6ac4aa4be6ccbf88d67fbae484275815a29c62fc1117d49e8e"} Nov 21 14:23:10 crc kubenswrapper[4774]: I1121 14:23:10.216455 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7030e5d8-2d2b-4cc5-a283-339599595a18","Type":"ContainerStarted","Data":"223dbde4450f7b7ff286da8e07b37592fd96e08300395e14958a6a48ec3f79fd"} Nov 21 14:23:10 crc kubenswrapper[4774]: I1121 14:23:10.223031 4774 generic.go:334] "Generic (PLEG): container finished" podID="eca93d73-f946-48b2-8a7d-93f3ff9fdf90" containerID="57d32bc073b86a5417874a7a080a2f58647ddb7b6de17b4aafa172c72f50afd3" exitCode=0 Nov 21 14:23:10 crc kubenswrapper[4774]: I1121 14:23:10.223097 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" event={"ID":"eca93d73-f946-48b2-8a7d-93f3ff9fdf90","Type":"ContainerDied","Data":"57d32bc073b86a5417874a7a080a2f58647ddb7b6de17b4aafa172c72f50afd3"} Nov 21 14:23:10 crc 
kubenswrapper[4774]: I1121 14:23:10.322663 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.334409295 podStartE2EDuration="6.322632273s" podCreationTimestamp="2025-11-21 14:23:04 +0000 UTC" firstStartedPulling="2025-11-21 14:23:05.176453983 +0000 UTC m=+1175.828653232" lastFinishedPulling="2025-11-21 14:23:09.164676961 +0000 UTC m=+1179.816876210" observedRunningTime="2025-11-21 14:23:10.310630129 +0000 UTC m=+1180.962829388" watchObservedRunningTime="2025-11-21 14:23:10.322632273 +0000 UTC m=+1180.974831532" Nov 21 14:23:11 crc kubenswrapper[4774]: I1121 14:23:11.233116 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" event={"ID":"eca93d73-f946-48b2-8a7d-93f3ff9fdf90","Type":"ContainerStarted","Data":"643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d"} Nov 21 14:23:11 crc kubenswrapper[4774]: I1121 14:23:11.234019 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:11 crc kubenswrapper[4774]: I1121 14:23:11.234395 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3e0a71b0-ad47-44f0-9c49-59a1430418b8","Type":"ContainerStarted","Data":"c1a2b84c75c549c10af090d8ea02fb0cb0d9fa9237d914a82d014aeeec7b09df"} Nov 21 14:23:11 crc kubenswrapper[4774]: I1121 14:23:11.234647 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 21 14:23:11 crc kubenswrapper[4774]: I1121 14:23:11.236946 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" event={"ID":"ef0c3332-e2c0-4e02-932d-ce49c5946ce4","Type":"ContainerStarted","Data":"59c36b986906c6fbbf6f4a065b70dcd811ad321eb2c5c6c1a1282e850f3e1338"} Nov 21 14:23:11 crc kubenswrapper[4774]: I1121 14:23:11.237161 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:11 crc kubenswrapper[4774]: I1121 14:23:11.258831 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" podStartSLOduration=4.928513335 podStartE2EDuration="9.258791069s" podCreationTimestamp="2025-11-21 14:23:02 +0000 UTC" firstStartedPulling="2025-11-21 14:23:04.833507721 +0000 UTC m=+1175.485706980" lastFinishedPulling="2025-11-21 14:23:09.163785465 +0000 UTC m=+1179.815984714" observedRunningTime="2025-11-21 14:23:11.250934044 +0000 UTC m=+1181.903133363" watchObservedRunningTime="2025-11-21 14:23:11.258791069 +0000 UTC m=+1181.910990328" Nov 21 14:23:11 crc kubenswrapper[4774]: I1121 14:23:11.278976 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" podStartSLOduration=5.357853156 podStartE2EDuration="9.278952198s" podCreationTimestamp="2025-11-21 14:23:02 +0000 UTC" firstStartedPulling="2025-11-21 14:23:04.834125308 +0000 UTC m=+1175.486324567" lastFinishedPulling="2025-11-21 14:23:08.75522436 +0000 UTC m=+1179.407423609" observedRunningTime="2025-11-21 14:23:11.276265821 +0000 UTC m=+1181.928465140" watchObservedRunningTime="2025-11-21 14:23:11.278952198 +0000 UTC m=+1181.931151457" Nov 21 14:23:11 crc kubenswrapper[4774]: I1121 14:23:11.301192 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.498302607 podStartE2EDuration="53.301158295s" 
podCreationTimestamp="2025-11-21 14:22:18 +0000 UTC" firstStartedPulling="2025-11-21 14:22:19.848679535 +0000 UTC m=+1130.500878794" lastFinishedPulling="2025-11-21 14:23:10.651535223 +0000 UTC m=+1181.303734482" observedRunningTime="2025-11-21 14:23:11.294316649 +0000 UTC m=+1181.946515908" watchObservedRunningTime="2025-11-21 14:23:11.301158295 +0000 UTC m=+1181.953357554" Nov 21 14:23:12 crc kubenswrapper[4774]: I1121 14:23:12.365367 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Nov 21 14:23:13 crc kubenswrapper[4774]: I1121 14:23:13.254428 4774 generic.go:334] "Generic (PLEG): container finished" podID="8b17b723-7e23-4a12-916e-0f2d00b72239" containerID="3cf0fc6137a3bf6ac4aa4be6ccbf88d67fbae484275815a29c62fc1117d49e8e" exitCode=0 Nov 21 14:23:13 crc kubenswrapper[4774]: I1121 14:23:13.254554 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8b17b723-7e23-4a12-916e-0f2d00b72239","Type":"ContainerDied","Data":"3cf0fc6137a3bf6ac4aa4be6ccbf88d67fbae484275815a29c62fc1117d49e8e"} Nov 21 14:23:13 crc kubenswrapper[4774]: I1121 14:23:13.257417 4774 generic.go:334] "Generic (PLEG): container finished" podID="7030e5d8-2d2b-4cc5-a283-339599595a18" containerID="223dbde4450f7b7ff286da8e07b37592fd96e08300395e14958a6a48ec3f79fd" exitCode=0 Nov 21 14:23:13 crc kubenswrapper[4774]: I1121 14:23:13.257465 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7030e5d8-2d2b-4cc5-a283-339599595a18","Type":"ContainerDied","Data":"223dbde4450f7b7ff286da8e07b37592fd96e08300395e14958a6a48ec3f79fd"} Nov 21 14:23:14 crc kubenswrapper[4774]: I1121 14:23:14.269865 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8b17b723-7e23-4a12-916e-0f2d00b72239","Type":"ContainerStarted","Data":"aaa15f882e1fd7018199c22c68333ff550fad91d1c6a777a6876fe84c7fc858b"} Nov 21 14:23:14 crc kubenswrapper[4774]: I1121 14:23:14.272598 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7030e5d8-2d2b-4cc5-a283-339599595a18","Type":"ContainerStarted","Data":"a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06"} Nov 21 14:23:14 crc kubenswrapper[4774]: I1121 14:23:14.297151 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=8.16994267 podStartE2EDuration="59.297133017s" podCreationTimestamp="2025-11-21 14:22:15 +0000 UTC" firstStartedPulling="2025-11-21 14:22:18.035982041 +0000 UTC m=+1128.688181300" lastFinishedPulling="2025-11-21 14:23:09.163172388 +0000 UTC m=+1179.815371647" observedRunningTime="2025-11-21 14:23:14.293515584 +0000 UTC m=+1184.945714873" watchObservedRunningTime="2025-11-21 14:23:14.297133017 +0000 UTC m=+1184.949332266" Nov 21 14:23:14 crc kubenswrapper[4774]: I1121 14:23:14.325399 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=9.646808911 podStartE2EDuration="1m0.325366978s" podCreationTimestamp="2025-11-21 14:22:14 +0000 UTC" firstStartedPulling="2025-11-21 14:22:16.690978776 +0000 UTC m=+1127.343178045" lastFinishedPulling="2025-11-21 14:23:07.369536863 +0000 UTC m=+1178.021736112" observedRunningTime="2025-11-21 14:23:14.319200871 +0000 UTC m=+1184.971400130" watchObservedRunningTime="2025-11-21 14:23:14.325366978 +0000 UTC m=+1184.977566237" Nov 21 14:23:15 crc 
kubenswrapper[4774]: I1121 14:23:15.981909 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Nov 21 14:23:15 crc kubenswrapper[4774]: I1121 14:23:15.981989 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Nov 21 14:23:17 crc kubenswrapper[4774]: I1121 14:23:17.374713 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Nov 21 14:23:17 crc kubenswrapper[4774]: I1121 14:23:17.375173 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Nov 21 14:23:17 crc kubenswrapper[4774]: I1121 14:23:17.723123 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:18 crc kubenswrapper[4774]: I1121 14:23:18.325253 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Nov 21 14:23:18 crc kubenswrapper[4774]: I1121 14:23:18.334214 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:18 crc kubenswrapper[4774]: I1121 14:23:18.416732 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77dd6f7c47-bwnjg"] Nov 21 14:23:18 crc kubenswrapper[4774]: I1121 14:23:18.417685 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" podUID="eca93d73-f946-48b2-8a7d-93f3ff9fdf90" containerName="dnsmasq-dns" containerID="cri-o://643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d" gracePeriod=10 Nov 21 14:23:18 crc kubenswrapper[4774]: I1121 14:23:18.458100 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.004556 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.201043 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-dns-svc\") pod \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.201100 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-ovsdbserver-nb\") pod \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.201213 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-config\") pod \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.201330 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qmts\" (UniqueName: \"kubernetes.io/projected/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-kube-api-access-7qmts\") pod \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\" (UID: \"eca93d73-f946-48b2-8a7d-93f3ff9fdf90\") " Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.215250 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-kube-api-access-7qmts" (OuterVolumeSpecName: "kube-api-access-7qmts") pod "eca93d73-f946-48b2-8a7d-93f3ff9fdf90" (UID: "eca93d73-f946-48b2-8a7d-93f3ff9fdf90"). InnerVolumeSpecName "kube-api-access-7qmts". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.322384 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qmts\" (UniqueName: \"kubernetes.io/projected/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-kube-api-access-7qmts\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.327892 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-config" (OuterVolumeSpecName: "config") pod "eca93d73-f946-48b2-8a7d-93f3ff9fdf90" (UID: "eca93d73-f946-48b2-8a7d-93f3ff9fdf90"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.329469 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.344971 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "eca93d73-f946-48b2-8a7d-93f3ff9fdf90" (UID: "eca93d73-f946-48b2-8a7d-93f3ff9fdf90"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.363465 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "eca93d73-f946-48b2-8a7d-93f3ff9fdf90" (UID: "eca93d73-f946-48b2-8a7d-93f3ff9fdf90"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.373392 4774 generic.go:334] "Generic (PLEG): container finished" podID="eca93d73-f946-48b2-8a7d-93f3ff9fdf90" containerID="643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d" exitCode=0 Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.374587 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.394612 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" event={"ID":"eca93d73-f946-48b2-8a7d-93f3ff9fdf90","Type":"ContainerDied","Data":"643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d"} Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.394708 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77dd6f7c47-bwnjg" event={"ID":"eca93d73-f946-48b2-8a7d-93f3ff9fdf90","Type":"ContainerDied","Data":"15bd10890bd281b6668cc8b962a541f90b488cd9882e3a72fd0a8a49299dc5e7"} Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.394740 4774 scope.go:117] "RemoveContainer" containerID="643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.424688 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.424724 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.424738 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eca93d73-f946-48b2-8a7d-93f3ff9fdf90-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.467918 4774 scope.go:117] "RemoveContainer" containerID="57d32bc073b86a5417874a7a080a2f58647ddb7b6de17b4aafa172c72f50afd3" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.478964 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77dd6f7c47-bwnjg"] Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.503319 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77dd6f7c47-bwnjg"] Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.530173 4774 scope.go:117] "RemoveContainer" containerID="643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d" Nov 21 14:23:19 crc kubenswrapper[4774]: E1121 14:23:19.547131 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d\": container with ID starting with 
643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d not found: ID does not exist" containerID="643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.547238 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d"} err="failed to get container status \"643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d\": rpc error: code = NotFound desc = could not find container \"643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d\": container with ID starting with 643843b22c06709d1c7f001bd6898f9b33ab7d142487ef06e09f789cc55d1c8d not found: ID does not exist" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.547302 4774 scope.go:117] "RemoveContainer" containerID="57d32bc073b86a5417874a7a080a2f58647ddb7b6de17b4aafa172c72f50afd3" Nov 21 14:23:19 crc kubenswrapper[4774]: E1121 14:23:19.553357 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57d32bc073b86a5417874a7a080a2f58647ddb7b6de17b4aafa172c72f50afd3\": container with ID starting with 57d32bc073b86a5417874a7a080a2f58647ddb7b6de17b4aafa172c72f50afd3 not found: ID does not exist" containerID="57d32bc073b86a5417874a7a080a2f58647ddb7b6de17b4aafa172c72f50afd3" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.553437 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57d32bc073b86a5417874a7a080a2f58647ddb7b6de17b4aafa172c72f50afd3"} err="failed to get container status \"57d32bc073b86a5417874a7a080a2f58647ddb7b6de17b4aafa172c72f50afd3\": rpc error: code = NotFound desc = could not find container \"57d32bc073b86a5417874a7a080a2f58647ddb7b6de17b4aafa172c72f50afd3\": container with ID starting with 57d32bc073b86a5417874a7a080a2f58647ddb7b6de17b4aafa172c72f50afd3 not found: ID does not exist" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.561266 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bdfc8db59-wsd6r"] Nov 21 14:23:19 crc kubenswrapper[4774]: E1121 14:23:19.561706 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca93d73-f946-48b2-8a7d-93f3ff9fdf90" containerName="init" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.561719 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca93d73-f946-48b2-8a7d-93f3ff9fdf90" containerName="init" Nov 21 14:23:19 crc kubenswrapper[4774]: E1121 14:23:19.561735 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca93d73-f946-48b2-8a7d-93f3ff9fdf90" containerName="dnsmasq-dns" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.561741 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca93d73-f946-48b2-8a7d-93f3ff9fdf90" containerName="dnsmasq-dns" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.561917 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="eca93d73-f946-48b2-8a7d-93f3ff9fdf90" containerName="dnsmasq-dns" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.562861 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.614312 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bdfc8db59-wsd6r"] Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.733123 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-ovsdbserver-sb\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.733191 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx7ml\" (UniqueName: \"kubernetes.io/projected/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-kube-api-access-gx7ml\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.733284 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-config\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.733435 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-ovsdbserver-nb\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.733476 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-dns-svc\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.835322 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-ovsdbserver-nb\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.835792 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-dns-svc\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.835871 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-ovsdbserver-sb\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.835905 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-gx7ml\" (UniqueName: \"kubernetes.io/projected/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-kube-api-access-gx7ml\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.835936 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-config\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.836996 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-config\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.837652 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-dns-svc\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.839462 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-ovsdbserver-sb\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.840673 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-ovsdbserver-nb\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.863340 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx7ml\" (UniqueName: \"kubernetes.io/projected/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-kube-api-access-gx7ml\") pod \"dnsmasq-dns-7bdfc8db59-wsd6r\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.864311 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Nov 21 14:23:19 crc kubenswrapper[4774]: I1121 14:23:19.900368 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.116413 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eca93d73-f946-48b2-8a7d-93f3ff9fdf90" path="/var/lib/kubelet/pods/eca93d73-f946-48b2-8a7d-93f3ff9fdf90/volumes" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.429541 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bdfc8db59-wsd6r"] Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.685507 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.692956 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.695340 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.705315 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.705524 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-4q68d" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.705598 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.705623 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.870929 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-cache\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.871002 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-lock\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.871311 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bm22n\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-kube-api-access-bm22n\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.871474 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.871575 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.973403 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-cache\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.973459 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-lock\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.973511 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-bm22n\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-kube-api-access-bm22n\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.973547 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.973580 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: E1121 14:23:20.973940 4774 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 21 14:23:20 crc kubenswrapper[4774]: E1121 14:23:20.973960 4774 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 21 14:23:20 crc kubenswrapper[4774]: E1121 14:23:20.974029 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift podName:6cde8d60-bdf9-405f-8991-5c1f55b0ee76 nodeName:}" failed. No retries permitted until 2025-11-21 14:23:21.474004906 +0000 UTC m=+1192.126204165 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift") pod "swift-storage-0" (UID: "6cde8d60-bdf9-405f-8991-5c1f55b0ee76") : configmap "swift-ring-files" not found Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.974452 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-lock\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.974560 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-cache\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.974587 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/swift-storage-0" Nov 21 14:23:20 crc kubenswrapper[4774]: I1121 14:23:20.996672 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bm22n\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-kube-api-access-bm22n\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:21 crc kubenswrapper[4774]: I1121 14:23:21.004174 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:21 crc kubenswrapper[4774]: I1121 14:23:21.395792 4774 generic.go:334] "Generic (PLEG): container finished" podID="e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" containerID="04e64de391eb167cd118a4c5f0463876c76525463e056fc9e5d1b11fa488e936" exitCode=0 Nov 21 14:23:21 crc kubenswrapper[4774]: I1121 14:23:21.395902 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" event={"ID":"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3","Type":"ContainerDied","Data":"04e64de391eb167cd118a4c5f0463876c76525463e056fc9e5d1b11fa488e936"} Nov 21 14:23:21 crc kubenswrapper[4774]: I1121 14:23:21.395933 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" event={"ID":"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3","Type":"ContainerStarted","Data":"b137ab21c8f96f0329ab227b5b1b5a39f1682bafe3ad34e07a1cf7dc5bcff663"} Nov 21 14:23:21 crc kubenswrapper[4774]: I1121 14:23:21.485136 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:21 crc kubenswrapper[4774]: E1121 14:23:21.485429 4774 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 21 14:23:21 crc kubenswrapper[4774]: E1121 14:23:21.485615 4774 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 21 14:23:21 crc kubenswrapper[4774]: E1121 14:23:21.485677 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift podName:6cde8d60-bdf9-405f-8991-5c1f55b0ee76 nodeName:}" failed. No retries permitted until 2025-11-21 14:23:22.48565308 +0000 UTC m=+1193.137852339 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift") pod "swift-storage-0" (UID: "6cde8d60-bdf9-405f-8991-5c1f55b0ee76") : configmap "swift-ring-files" not found Nov 21 14:23:21 crc kubenswrapper[4774]: I1121 14:23:21.490662 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Nov 21 14:23:21 crc kubenswrapper[4774]: I1121 14:23:21.717589 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.405647 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" event={"ID":"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3","Type":"ContainerStarted","Data":"0f1f36be6c3f4241aee49f35177b450a1aa8f641aa4a6715c401f8e36d1ce504"} Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.424804 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" podStartSLOduration=3.424783221 podStartE2EDuration="3.424783221s" podCreationTimestamp="2025-11-21 14:23:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:23:22.421377293 +0000 UTC m=+1193.073576572" watchObservedRunningTime="2025-11-21 14:23:22.424783221 +0000 UTC m=+1193.076982480" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.505067 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:22 crc kubenswrapper[4774]: E1121 14:23:22.505411 4774 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 21 14:23:22 crc kubenswrapper[4774]: E1121 14:23:22.505966 4774 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 21 14:23:22 crc kubenswrapper[4774]: E1121 14:23:22.506071 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift podName:6cde8d60-bdf9-405f-8991-5c1f55b0ee76 nodeName:}" failed. No retries permitted until 2025-11-21 14:23:24.506031013 +0000 UTC m=+1195.158230272 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift") pod "swift-storage-0" (UID: "6cde8d60-bdf9-405f-8991-5c1f55b0ee76") : configmap "swift-ring-files" not found Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.626778 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-t4rqm"] Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.628061 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t4rqm" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.635261 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-128f-account-create-gpf7s"] Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.636710 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-128f-account-create-gpf7s" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.639851 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.643955 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-t4rqm"] Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.663588 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-128f-account-create-gpf7s"] Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.708809 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n948n\" (UniqueName: \"kubernetes.io/projected/11c290e3-78eb-4deb-82ac-8b3e93ef5c66-kube-api-access-n948n\") pod \"glance-db-create-t4rqm\" (UID: \"11c290e3-78eb-4deb-82ac-8b3e93ef5c66\") " pod="openstack/glance-db-create-t4rqm" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.709002 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce49e63-9930-42ca-83ff-fc116eeacf1d-operator-scripts\") pod \"glance-128f-account-create-gpf7s\" (UID: \"fce49e63-9930-42ca-83ff-fc116eeacf1d\") " pod="openstack/glance-128f-account-create-gpf7s" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.709054 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11c290e3-78eb-4deb-82ac-8b3e93ef5c66-operator-scripts\") pod \"glance-db-create-t4rqm\" (UID: \"11c290e3-78eb-4deb-82ac-8b3e93ef5c66\") " pod="openstack/glance-db-create-t4rqm" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.709195 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pp9lq\" (UniqueName: \"kubernetes.io/projected/fce49e63-9930-42ca-83ff-fc116eeacf1d-kube-api-access-pp9lq\") pod \"glance-128f-account-create-gpf7s\" (UID: \"fce49e63-9930-42ca-83ff-fc116eeacf1d\") " pod="openstack/glance-128f-account-create-gpf7s" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.810519 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce49e63-9930-42ca-83ff-fc116eeacf1d-operator-scripts\") pod \"glance-128f-account-create-gpf7s\" (UID: \"fce49e63-9930-42ca-83ff-fc116eeacf1d\") " pod="openstack/glance-128f-account-create-gpf7s" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.810599 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11c290e3-78eb-4deb-82ac-8b3e93ef5c66-operator-scripts\") pod \"glance-db-create-t4rqm\" (UID: \"11c290e3-78eb-4deb-82ac-8b3e93ef5c66\") " pod="openstack/glance-db-create-t4rqm" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.810736 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pp9lq\" (UniqueName: \"kubernetes.io/projected/fce49e63-9930-42ca-83ff-fc116eeacf1d-kube-api-access-pp9lq\") pod \"glance-128f-account-create-gpf7s\" (UID: \"fce49e63-9930-42ca-83ff-fc116eeacf1d\") " pod="openstack/glance-128f-account-create-gpf7s" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.810767 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-n948n\" (UniqueName: \"kubernetes.io/projected/11c290e3-78eb-4deb-82ac-8b3e93ef5c66-kube-api-access-n948n\") pod \"glance-db-create-t4rqm\" (UID: \"11c290e3-78eb-4deb-82ac-8b3e93ef5c66\") " pod="openstack/glance-db-create-t4rqm" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.811585 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11c290e3-78eb-4deb-82ac-8b3e93ef5c66-operator-scripts\") pod \"glance-db-create-t4rqm\" (UID: \"11c290e3-78eb-4deb-82ac-8b3e93ef5c66\") " pod="openstack/glance-db-create-t4rqm" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.812601 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce49e63-9930-42ca-83ff-fc116eeacf1d-operator-scripts\") pod \"glance-128f-account-create-gpf7s\" (UID: \"fce49e63-9930-42ca-83ff-fc116eeacf1d\") " pod="openstack/glance-128f-account-create-gpf7s" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.835634 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pp9lq\" (UniqueName: \"kubernetes.io/projected/fce49e63-9930-42ca-83ff-fc116eeacf1d-kube-api-access-pp9lq\") pod \"glance-128f-account-create-gpf7s\" (UID: \"fce49e63-9930-42ca-83ff-fc116eeacf1d\") " pod="openstack/glance-128f-account-create-gpf7s" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.839475 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n948n\" (UniqueName: \"kubernetes.io/projected/11c290e3-78eb-4deb-82ac-8b3e93ef5c66-kube-api-access-n948n\") pod \"glance-db-create-t4rqm\" (UID: \"11c290e3-78eb-4deb-82ac-8b3e93ef5c66\") " pod="openstack/glance-db-create-t4rqm" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.950846 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t4rqm" Nov 21 14:23:22 crc kubenswrapper[4774]: I1121 14:23:22.962640 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-128f-account-create-gpf7s" Nov 21 14:23:23 crc kubenswrapper[4774]: I1121 14:23:23.415247 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:23 crc kubenswrapper[4774]: I1121 14:23:23.470083 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-128f-account-create-gpf7s"] Nov 21 14:23:23 crc kubenswrapper[4774]: W1121 14:23:23.474162 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfce49e63_9930_42ca_83ff_fc116eeacf1d.slice/crio-af6652b427232d87fa4c79040ff915cf31e2089a9afcd37f1ad35635e11faf36 WatchSource:0}: Error finding container af6652b427232d87fa4c79040ff915cf31e2089a9afcd37f1ad35635e11faf36: Status 404 returned error can't find the container with id af6652b427232d87fa4c79040ff915cf31e2089a9afcd37f1ad35635e11faf36 Nov 21 14:23:23 crc kubenswrapper[4774]: I1121 14:23:23.557747 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-t4rqm"] Nov 21 14:23:23 crc kubenswrapper[4774]: W1121 14:23:23.584671 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11c290e3_78eb_4deb_82ac_8b3e93ef5c66.slice/crio-9dd59118f7780f73214958b8527340004819cfc392395c4eb4e78d3c0176e427 WatchSource:0}: Error finding container 9dd59118f7780f73214958b8527340004819cfc392395c4eb4e78d3c0176e427: Status 404 returned error can't find the container with id 9dd59118f7780f73214958b8527340004819cfc392395c4eb4e78d3c0176e427 Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.429002 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-128f-account-create-gpf7s" event={"ID":"fce49e63-9930-42ca-83ff-fc116eeacf1d","Type":"ContainerStarted","Data":"af6652b427232d87fa4c79040ff915cf31e2089a9afcd37f1ad35635e11faf36"} Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.498546 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t4rqm" event={"ID":"11c290e3-78eb-4deb-82ac-8b3e93ef5c66","Type":"ContainerStarted","Data":"9dd59118f7780f73214958b8527340004819cfc392395c4eb4e78d3c0176e427"} Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.561322 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:24 crc kubenswrapper[4774]: E1121 14:23:24.561578 4774 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 21 14:23:24 crc kubenswrapper[4774]: E1121 14:23:24.561604 4774 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 21 14:23:24 crc kubenswrapper[4774]: E1121 14:23:24.561674 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift podName:6cde8d60-bdf9-405f-8991-5c1f55b0ee76 nodeName:}" failed. No retries permitted until 2025-11-21 14:23:28.561649097 +0000 UTC m=+1199.213848356 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift") pod "swift-storage-0" (UID: "6cde8d60-bdf9-405f-8991-5c1f55b0ee76") : configmap "swift-ring-files" not found Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.581149 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-287qb"] Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.585661 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.591504 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-287qb"] Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.598288 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.598556 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.598600 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.766997 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-dispersionconf\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.767083 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4cff911-3af8-45f1-b86a-d3629217b328-scripts\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.767105 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-combined-ca-bundle\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.767130 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-swiftconf\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.767192 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f4cff911-3af8-45f1-b86a-d3629217b328-ring-data-devices\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.767242 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f4cff911-3af8-45f1-b86a-d3629217b328-etc-swift\") pod 
\"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.767271 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhsh2\" (UniqueName: \"kubernetes.io/projected/f4cff911-3af8-45f1-b86a-d3629217b328-kube-api-access-zhsh2\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.868583 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-dispersionconf\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.868657 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4cff911-3af8-45f1-b86a-d3629217b328-scripts\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.868676 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-combined-ca-bundle\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.868698 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-swiftconf\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.868753 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f4cff911-3af8-45f1-b86a-d3629217b328-ring-data-devices\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.868803 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f4cff911-3af8-45f1-b86a-d3629217b328-etc-swift\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.868855 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhsh2\" (UniqueName: \"kubernetes.io/projected/f4cff911-3af8-45f1-b86a-d3629217b328-kube-api-access-zhsh2\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.869679 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f4cff911-3af8-45f1-b86a-d3629217b328-etc-swift\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " 
pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.869713 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4cff911-3af8-45f1-b86a-d3629217b328-scripts\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.870194 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f4cff911-3af8-45f1-b86a-d3629217b328-ring-data-devices\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.875762 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-swiftconf\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.879178 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-dispersionconf\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.882648 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-combined-ca-bundle\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.889518 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhsh2\" (UniqueName: \"kubernetes.io/projected/f4cff911-3af8-45f1-b86a-d3629217b328-kube-api-access-zhsh2\") pod \"swift-ring-rebalance-287qb\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:24 crc kubenswrapper[4774]: I1121 14:23:24.914697 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:25 crc kubenswrapper[4774]: I1121 14:23:25.391193 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-287qb"] Nov 21 14:23:25 crc kubenswrapper[4774]: I1121 14:23:25.508423 4774 generic.go:334] "Generic (PLEG): container finished" podID="11c290e3-78eb-4deb-82ac-8b3e93ef5c66" containerID="7816a7aeb0079af33aea84708be11ff4efaf0a43340ae98f3d1b5f43d97183df" exitCode=0 Nov 21 14:23:25 crc kubenswrapper[4774]: I1121 14:23:25.508535 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t4rqm" event={"ID":"11c290e3-78eb-4deb-82ac-8b3e93ef5c66","Type":"ContainerDied","Data":"7816a7aeb0079af33aea84708be11ff4efaf0a43340ae98f3d1b5f43d97183df"} Nov 21 14:23:25 crc kubenswrapper[4774]: I1121 14:23:25.509910 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-287qb" event={"ID":"f4cff911-3af8-45f1-b86a-d3629217b328","Type":"ContainerStarted","Data":"466c5248f27112b759325850b15b683ce65d6679f7f4e603b326e14afa4d2864"} Nov 21 14:23:25 crc kubenswrapper[4774]: I1121 14:23:25.511588 4774 generic.go:334] "Generic (PLEG): container finished" podID="fce49e63-9930-42ca-83ff-fc116eeacf1d" containerID="ccd254ac8081ba427eba51acda723540d2c898bacd88918b0b167f5f8dc4e05c" exitCode=0 Nov 21 14:23:25 crc kubenswrapper[4774]: I1121 14:23:25.511634 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-128f-account-create-gpf7s" event={"ID":"fce49e63-9930-42ca-83ff-fc116eeacf1d","Type":"ContainerDied","Data":"ccd254ac8081ba427eba51acda723540d2c898bacd88918b0b167f5f8dc4e05c"} Nov 21 14:23:26 crc kubenswrapper[4774]: I1121 14:23:26.893209 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-zjwrf"] Nov 21 14:23:26 crc kubenswrapper[4774]: I1121 14:23:26.895204 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-zjwrf" Nov 21 14:23:26 crc kubenswrapper[4774]: I1121 14:23:26.902021 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-zjwrf"] Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.015973 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pbzg\" (UniqueName: \"kubernetes.io/projected/e0447367-db69-44ff-8077-29ac2c200dbf-kube-api-access-2pbzg\") pod \"keystone-db-create-zjwrf\" (UID: \"e0447367-db69-44ff-8077-29ac2c200dbf\") " pod="openstack/keystone-db-create-zjwrf" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.016584 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0447367-db69-44ff-8077-29ac2c200dbf-operator-scripts\") pod \"keystone-db-create-zjwrf\" (UID: \"e0447367-db69-44ff-8077-29ac2c200dbf\") " pod="openstack/keystone-db-create-zjwrf" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.024071 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-88ef-account-create-hlvhs"] Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.025516 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-88ef-account-create-hlvhs" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.030184 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.031637 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-88ef-account-create-hlvhs"] Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.118160 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e-operator-scripts\") pod \"keystone-88ef-account-create-hlvhs\" (UID: \"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e\") " pod="openstack/keystone-88ef-account-create-hlvhs" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.118224 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0447367-db69-44ff-8077-29ac2c200dbf-operator-scripts\") pod \"keystone-db-create-zjwrf\" (UID: \"e0447367-db69-44ff-8077-29ac2c200dbf\") " pod="openstack/keystone-db-create-zjwrf" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.118265 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqqgq\" (UniqueName: \"kubernetes.io/projected/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e-kube-api-access-zqqgq\") pod \"keystone-88ef-account-create-hlvhs\" (UID: \"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e\") " pod="openstack/keystone-88ef-account-create-hlvhs" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.118317 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pbzg\" (UniqueName: \"kubernetes.io/projected/e0447367-db69-44ff-8077-29ac2c200dbf-kube-api-access-2pbzg\") pod \"keystone-db-create-zjwrf\" (UID: \"e0447367-db69-44ff-8077-29ac2c200dbf\") " pod="openstack/keystone-db-create-zjwrf" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.119243 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0447367-db69-44ff-8077-29ac2c200dbf-operator-scripts\") pod \"keystone-db-create-zjwrf\" (UID: \"e0447367-db69-44ff-8077-29ac2c200dbf\") " pod="openstack/keystone-db-create-zjwrf" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.144752 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pbzg\" (UniqueName: \"kubernetes.io/projected/e0447367-db69-44ff-8077-29ac2c200dbf-kube-api-access-2pbzg\") pod \"keystone-db-create-zjwrf\" (UID: \"e0447367-db69-44ff-8077-29ac2c200dbf\") " pod="openstack/keystone-db-create-zjwrf" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.218738 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-zjwrf" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.220180 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e-operator-scripts\") pod \"keystone-88ef-account-create-hlvhs\" (UID: \"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e\") " pod="openstack/keystone-88ef-account-create-hlvhs" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.220245 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqqgq\" (UniqueName: \"kubernetes.io/projected/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e-kube-api-access-zqqgq\") pod \"keystone-88ef-account-create-hlvhs\" (UID: \"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e\") " pod="openstack/keystone-88ef-account-create-hlvhs" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.222775 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e-operator-scripts\") pod \"keystone-88ef-account-create-hlvhs\" (UID: \"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e\") " pod="openstack/keystone-88ef-account-create-hlvhs" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.244769 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqqgq\" (UniqueName: \"kubernetes.io/projected/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e-kube-api-access-zqqgq\") pod \"keystone-88ef-account-create-hlvhs\" (UID: \"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e\") " pod="openstack/keystone-88ef-account-create-hlvhs" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.320078 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-9pp78"] Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.321565 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9pp78" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.330389 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-2802-account-create-tkf4w"] Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.332143 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-2802-account-create-tkf4w" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.334861 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.344908 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9pp78"] Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.347567 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-88ef-account-create-hlvhs" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.356737 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-2802-account-create-tkf4w"] Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.433250 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxt4w\" (UniqueName: \"kubernetes.io/projected/4f7e56cf-c53d-4d3e-8e76-a8de6556546b-kube-api-access-jxt4w\") pod \"placement-2802-account-create-tkf4w\" (UID: \"4f7e56cf-c53d-4d3e-8e76-a8de6556546b\") " pod="openstack/placement-2802-account-create-tkf4w" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.433711 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djg5p\" (UniqueName: \"kubernetes.io/projected/ab5c0068-d61b-4d09-8632-70a5b637910c-kube-api-access-djg5p\") pod \"placement-db-create-9pp78\" (UID: \"ab5c0068-d61b-4d09-8632-70a5b637910c\") " pod="openstack/placement-db-create-9pp78" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.433761 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab5c0068-d61b-4d09-8632-70a5b637910c-operator-scripts\") pod \"placement-db-create-9pp78\" (UID: \"ab5c0068-d61b-4d09-8632-70a5b637910c\") " pod="openstack/placement-db-create-9pp78" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.433864 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f7e56cf-c53d-4d3e-8e76-a8de6556546b-operator-scripts\") pod \"placement-2802-account-create-tkf4w\" (UID: \"4f7e56cf-c53d-4d3e-8e76-a8de6556546b\") " pod="openstack/placement-2802-account-create-tkf4w" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.466580 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-2sxpw" podUID="7ee04f12-987f-4f31-81b3-10cd067af310" containerName="ovn-controller" probeResult="failure" output=< Nov 21 14:23:27 crc kubenswrapper[4774]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 21 14:23:27 crc kubenswrapper[4774]: > Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.536263 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxt4w\" (UniqueName: \"kubernetes.io/projected/4f7e56cf-c53d-4d3e-8e76-a8de6556546b-kube-api-access-jxt4w\") pod \"placement-2802-account-create-tkf4w\" (UID: \"4f7e56cf-c53d-4d3e-8e76-a8de6556546b\") " pod="openstack/placement-2802-account-create-tkf4w" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.536341 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djg5p\" (UniqueName: \"kubernetes.io/projected/ab5c0068-d61b-4d09-8632-70a5b637910c-kube-api-access-djg5p\") pod \"placement-db-create-9pp78\" (UID: \"ab5c0068-d61b-4d09-8632-70a5b637910c\") " pod="openstack/placement-db-create-9pp78" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.536384 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab5c0068-d61b-4d09-8632-70a5b637910c-operator-scripts\") pod \"placement-db-create-9pp78\" (UID: \"ab5c0068-d61b-4d09-8632-70a5b637910c\") " 
pod="openstack/placement-db-create-9pp78" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.536460 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f7e56cf-c53d-4d3e-8e76-a8de6556546b-operator-scripts\") pod \"placement-2802-account-create-tkf4w\" (UID: \"4f7e56cf-c53d-4d3e-8e76-a8de6556546b\") " pod="openstack/placement-2802-account-create-tkf4w" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.537432 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f7e56cf-c53d-4d3e-8e76-a8de6556546b-operator-scripts\") pod \"placement-2802-account-create-tkf4w\" (UID: \"4f7e56cf-c53d-4d3e-8e76-a8de6556546b\") " pod="openstack/placement-2802-account-create-tkf4w" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.538790 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab5c0068-d61b-4d09-8632-70a5b637910c-operator-scripts\") pod \"placement-db-create-9pp78\" (UID: \"ab5c0068-d61b-4d09-8632-70a5b637910c\") " pod="openstack/placement-db-create-9pp78" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.569045 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djg5p\" (UniqueName: \"kubernetes.io/projected/ab5c0068-d61b-4d09-8632-70a5b637910c-kube-api-access-djg5p\") pod \"placement-db-create-9pp78\" (UID: \"ab5c0068-d61b-4d09-8632-70a5b637910c\") " pod="openstack/placement-db-create-9pp78" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.570503 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxt4w\" (UniqueName: \"kubernetes.io/projected/4f7e56cf-c53d-4d3e-8e76-a8de6556546b-kube-api-access-jxt4w\") pod \"placement-2802-account-create-tkf4w\" (UID: \"4f7e56cf-c53d-4d3e-8e76-a8de6556546b\") " pod="openstack/placement-2802-account-create-tkf4w" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.663277 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9pp78" Nov 21 14:23:27 crc kubenswrapper[4774]: I1121 14:23:27.675478 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-2802-account-create-tkf4w" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.154096 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t4rqm" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.215722 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-128f-account-create-gpf7s" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.252522 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11c290e3-78eb-4deb-82ac-8b3e93ef5c66-operator-scripts\") pod \"11c290e3-78eb-4deb-82ac-8b3e93ef5c66\" (UID: \"11c290e3-78eb-4deb-82ac-8b3e93ef5c66\") " Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.252698 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n948n\" (UniqueName: \"kubernetes.io/projected/11c290e3-78eb-4deb-82ac-8b3e93ef5c66-kube-api-access-n948n\") pod \"11c290e3-78eb-4deb-82ac-8b3e93ef5c66\" (UID: \"11c290e3-78eb-4deb-82ac-8b3e93ef5c66\") " Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.257212 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11c290e3-78eb-4deb-82ac-8b3e93ef5c66-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "11c290e3-78eb-4deb-82ac-8b3e93ef5c66" (UID: "11c290e3-78eb-4deb-82ac-8b3e93ef5c66"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.267182 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11c290e3-78eb-4deb-82ac-8b3e93ef5c66-kube-api-access-n948n" (OuterVolumeSpecName: "kube-api-access-n948n") pod "11c290e3-78eb-4deb-82ac-8b3e93ef5c66" (UID: "11c290e3-78eb-4deb-82ac-8b3e93ef5c66"). InnerVolumeSpecName "kube-api-access-n948n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.354939 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce49e63-9930-42ca-83ff-fc116eeacf1d-operator-scripts\") pod \"fce49e63-9930-42ca-83ff-fc116eeacf1d\" (UID: \"fce49e63-9930-42ca-83ff-fc116eeacf1d\") " Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.355138 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pp9lq\" (UniqueName: \"kubernetes.io/projected/fce49e63-9930-42ca-83ff-fc116eeacf1d-kube-api-access-pp9lq\") pod \"fce49e63-9930-42ca-83ff-fc116eeacf1d\" (UID: \"fce49e63-9930-42ca-83ff-fc116eeacf1d\") " Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.355687 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n948n\" (UniqueName: \"kubernetes.io/projected/11c290e3-78eb-4deb-82ac-8b3e93ef5c66-kube-api-access-n948n\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.355720 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11c290e3-78eb-4deb-82ac-8b3e93ef5c66-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.355782 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fce49e63-9930-42ca-83ff-fc116eeacf1d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fce49e63-9930-42ca-83ff-fc116eeacf1d" (UID: "fce49e63-9930-42ca-83ff-fc116eeacf1d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.358988 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fce49e63-9930-42ca-83ff-fc116eeacf1d-kube-api-access-pp9lq" (OuterVolumeSpecName: "kube-api-access-pp9lq") pod "fce49e63-9930-42ca-83ff-fc116eeacf1d" (UID: "fce49e63-9930-42ca-83ff-fc116eeacf1d"). InnerVolumeSpecName "kube-api-access-pp9lq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.457959 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fce49e63-9930-42ca-83ff-fc116eeacf1d-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.458020 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pp9lq\" (UniqueName: \"kubernetes.io/projected/fce49e63-9930-42ca-83ff-fc116eeacf1d-kube-api-access-pp9lq\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.561952 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:28 crc kubenswrapper[4774]: E1121 14:23:28.562161 4774 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 21 14:23:28 crc kubenswrapper[4774]: E1121 14:23:28.562490 4774 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 21 14:23:28 crc kubenswrapper[4774]: E1121 14:23:28.562557 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift podName:6cde8d60-bdf9-405f-8991-5c1f55b0ee76 nodeName:}" failed. No retries permitted until 2025-11-21 14:23:36.562535509 +0000 UTC m=+1207.214734768 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift") pod "swift-storage-0" (UID: "6cde8d60-bdf9-405f-8991-5c1f55b0ee76") : configmap "swift-ring-files" not found Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.564369 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-128f-account-create-gpf7s" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.565669 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-128f-account-create-gpf7s" event={"ID":"fce49e63-9930-42ca-83ff-fc116eeacf1d","Type":"ContainerDied","Data":"af6652b427232d87fa4c79040ff915cf31e2089a9afcd37f1ad35635e11faf36"} Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.565928 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af6652b427232d87fa4c79040ff915cf31e2089a9afcd37f1ad35635e11faf36" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.570389 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t4rqm" event={"ID":"11c290e3-78eb-4deb-82ac-8b3e93ef5c66","Type":"ContainerDied","Data":"9dd59118f7780f73214958b8527340004819cfc392395c4eb4e78d3c0176e427"} Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.570435 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dd59118f7780f73214958b8527340004819cfc392395c4eb4e78d3c0176e427" Nov 21 14:23:28 crc kubenswrapper[4774]: I1121 14:23:28.570542 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t4rqm" Nov 21 14:23:29 crc kubenswrapper[4774]: I1121 14:23:29.902353 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:23:29 crc kubenswrapper[4774]: I1121 14:23:29.984148 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78bc665c87-n2whg"] Nov 21 14:23:29 crc kubenswrapper[4774]: I1121 14:23:29.984421 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" podUID="ef0c3332-e2c0-4e02-932d-ce49c5946ce4" containerName="dnsmasq-dns" containerID="cri-o://59c36b986906c6fbbf6f4a065b70dcd811ad321eb2c5c6c1a1282e850f3e1338" gracePeriod=10 Nov 21 14:23:30 crc kubenswrapper[4774]: I1121 14:23:30.595496 4774 generic.go:334] "Generic (PLEG): container finished" podID="ef0c3332-e2c0-4e02-932d-ce49c5946ce4" containerID="59c36b986906c6fbbf6f4a065b70dcd811ad321eb2c5c6c1a1282e850f3e1338" exitCode=0 Nov 21 14:23:30 crc kubenswrapper[4774]: I1121 14:23:30.595994 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" event={"ID":"ef0c3332-e2c0-4e02-932d-ce49c5946ce4","Type":"ContainerDied","Data":"59c36b986906c6fbbf6f4a065b70dcd811ad321eb2c5c6c1a1282e850f3e1338"} Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.577093 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9pp78"] Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.589420 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-88ef-account-create-hlvhs"] Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.610841 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9pp78" event={"ID":"ab5c0068-d61b-4d09-8632-70a5b637910c","Type":"ContainerStarted","Data":"80f2a969407f4b46ad1554a512252c5eb28ebbf31b33110a8e82a746caf0d715"} Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.618205 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-2802-account-create-tkf4w"] Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.655263 4774 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack"/"keystone-db-secret" Nov 21 14:23:31 crc kubenswrapper[4774]: W1121 14:23:31.665959 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f7e56cf_c53d_4d3e_8e76_a8de6556546b.slice/crio-9923d6f4d9ee15b3f9b0040fd5ce454ba355f81022fb99e0ff3a6455ee2c0506 WatchSource:0}: Error finding container 9923d6f4d9ee15b3f9b0040fd5ce454ba355f81022fb99e0ff3a6455ee2c0506: Status 404 returned error can't find the container with id 9923d6f4d9ee15b3f9b0040fd5ce454ba355f81022fb99e0ff3a6455ee2c0506 Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.698962 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.777406 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-zjwrf"] Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.790503 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:31 crc kubenswrapper[4774]: W1121 14:23:31.795339 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0447367_db69_44ff_8077_29ac2c200dbf.slice/crio-8894be006d3ebfbdc32aa9c8e31cf8819626bd667468bfb846cc8c7e33476078 WatchSource:0}: Error finding container 8894be006d3ebfbdc32aa9c8e31cf8819626bd667468bfb846cc8c7e33476078: Status 404 returned error can't find the container with id 8894be006d3ebfbdc32aa9c8e31cf8819626bd667468bfb846cc8c7e33476078 Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.930571 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-config\") pod \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.931665 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpxs4\" (UniqueName: \"kubernetes.io/projected/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-kube-api-access-dpxs4\") pod \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.931958 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-ovsdbserver-nb\") pod \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.932046 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-ovsdbserver-sb\") pod \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.932076 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-dns-svc\") pod \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\" (UID: \"ef0c3332-e2c0-4e02-932d-ce49c5946ce4\") " Nov 21 14:23:31 crc kubenswrapper[4774]: I1121 14:23:31.937110 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-kube-api-access-dpxs4" (OuterVolumeSpecName: "kube-api-access-dpxs4") pod "ef0c3332-e2c0-4e02-932d-ce49c5946ce4" (UID: "ef0c3332-e2c0-4e02-932d-ce49c5946ce4"). InnerVolumeSpecName "kube-api-access-dpxs4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.019017 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-config" (OuterVolumeSpecName: "config") pod "ef0c3332-e2c0-4e02-932d-ce49c5946ce4" (UID: "ef0c3332-e2c0-4e02-932d-ce49c5946ce4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.022640 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ef0c3332-e2c0-4e02-932d-ce49c5946ce4" (UID: "ef0c3332-e2c0-4e02-932d-ce49c5946ce4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.034577 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.034917 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpxs4\" (UniqueName: \"kubernetes.io/projected/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-kube-api-access-dpxs4\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.035017 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.042800 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ef0c3332-e2c0-4e02-932d-ce49c5946ce4" (UID: "ef0c3332-e2c0-4e02-932d-ce49c5946ce4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.064206 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ef0c3332-e2c0-4e02-932d-ce49c5946ce4" (UID: "ef0c3332-e2c0-4e02-932d-ce49c5946ce4"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.137160 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.137211 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ef0c3332-e2c0-4e02-932d-ce49c5946ce4-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.404288 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-2sxpw" podUID="7ee04f12-987f-4f31-81b3-10cd067af310" containerName="ovn-controller" probeResult="failure" output=< Nov 21 14:23:32 crc kubenswrapper[4774]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 21 14:23:32 crc kubenswrapper[4774]: > Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.412985 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.415652 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.621133 4774 generic.go:334] "Generic (PLEG): container finished" podID="4f7e56cf-c53d-4d3e-8e76-a8de6556546b" containerID="658d4d24fc6e94b57c7bb466d4271afd9667deeb008df3a43b13453eda4f811e" exitCode=0 Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.621245 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-2802-account-create-tkf4w" event={"ID":"4f7e56cf-c53d-4d3e-8e76-a8de6556546b","Type":"ContainerDied","Data":"658d4d24fc6e94b57c7bb466d4271afd9667deeb008df3a43b13453eda4f811e"} Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.621300 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-2802-account-create-tkf4w" event={"ID":"4f7e56cf-c53d-4d3e-8e76-a8de6556546b","Type":"ContainerStarted","Data":"9923d6f4d9ee15b3f9b0040fd5ce454ba355f81022fb99e0ff3a6455ee2c0506"} Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.624007 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-287qb" event={"ID":"f4cff911-3af8-45f1-b86a-d3629217b328","Type":"ContainerStarted","Data":"a63e72ed87ad6e5829063d53d36ef358e0c60f0ac83c22a939e23b0679064a2e"} Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.627001 4774 generic.go:334] "Generic (PLEG): container finished" podID="ab5c0068-d61b-4d09-8632-70a5b637910c" containerID="5a1430b8284950fa8feb2b26ca8f0daccce10ea31e4279be89c3cfb24be1dc61" exitCode=0 Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.627145 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9pp78" event={"ID":"ab5c0068-d61b-4d09-8632-70a5b637910c","Type":"ContainerDied","Data":"5a1430b8284950fa8feb2b26ca8f0daccce10ea31e4279be89c3cfb24be1dc61"} Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.628805 4774 generic.go:334] "Generic (PLEG): container finished" podID="e0447367-db69-44ff-8077-29ac2c200dbf" containerID="61b465c77a81919460bfd27ff0b88f6f73822bcd4ca5c29028e54ff4fd70406e" exitCode=0 Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.628907 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-db-create-zjwrf" event={"ID":"e0447367-db69-44ff-8077-29ac2c200dbf","Type":"ContainerDied","Data":"61b465c77a81919460bfd27ff0b88f6f73822bcd4ca5c29028e54ff4fd70406e"} Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.628948 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-zjwrf" event={"ID":"e0447367-db69-44ff-8077-29ac2c200dbf","Type":"ContainerStarted","Data":"8894be006d3ebfbdc32aa9c8e31cf8819626bd667468bfb846cc8c7e33476078"} Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.630963 4774 generic.go:334] "Generic (PLEG): container finished" podID="71c4ef14-3bfc-4cb6-806a-a864a67fdf7e" containerID="4b636b9b604abd7e65fdf5cd245e45bed745018b346fa76088458d3add55daed" exitCode=0 Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.631078 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-88ef-account-create-hlvhs" event={"ID":"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e","Type":"ContainerDied","Data":"4b636b9b604abd7e65fdf5cd245e45bed745018b346fa76088458d3add55daed"} Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.631099 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-88ef-account-create-hlvhs" event={"ID":"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e","Type":"ContainerStarted","Data":"0233bb3866d3ea56c86dd88162d603c67d60d5e70c0b00b0a5c1c83f4cf24aee"} Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.636175 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" event={"ID":"ef0c3332-e2c0-4e02-932d-ce49c5946ce4","Type":"ContainerDied","Data":"93493d453f399ed4899ee79fcd85c60bbb6e39b3195e09d5c6c8149e03855128"} Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.636305 4774 scope.go:117] "RemoveContainer" containerID="59c36b986906c6fbbf6f4a065b70dcd811ad321eb2c5c6c1a1282e850f3e1338" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.636205 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78bc665c87-n2whg" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.713683 4774 scope.go:117] "RemoveContainer" containerID="b76020aca53ac9257235cc5beb94b75d8d743299edd5cebaff629a3e207058b5" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.747531 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-2sxpw-config-nsj7m"] Nov 21 14:23:32 crc kubenswrapper[4774]: E1121 14:23:32.751344 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fce49e63-9930-42ca-83ff-fc116eeacf1d" containerName="mariadb-account-create" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.751478 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fce49e63-9930-42ca-83ff-fc116eeacf1d" containerName="mariadb-account-create" Nov 21 14:23:32 crc kubenswrapper[4774]: E1121 14:23:32.751591 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11c290e3-78eb-4deb-82ac-8b3e93ef5c66" containerName="mariadb-database-create" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.751665 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="11c290e3-78eb-4deb-82ac-8b3e93ef5c66" containerName="mariadb-database-create" Nov 21 14:23:32 crc kubenswrapper[4774]: E1121 14:23:32.751786 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef0c3332-e2c0-4e02-932d-ce49c5946ce4" containerName="dnsmasq-dns" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.751895 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef0c3332-e2c0-4e02-932d-ce49c5946ce4" containerName="dnsmasq-dns" Nov 21 14:23:32 crc kubenswrapper[4774]: E1121 14:23:32.751989 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef0c3332-e2c0-4e02-932d-ce49c5946ce4" containerName="init" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.752066 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef0c3332-e2c0-4e02-932d-ce49c5946ce4" containerName="init" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.752513 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="11c290e3-78eb-4deb-82ac-8b3e93ef5c66" containerName="mariadb-database-create" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.756086 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="fce49e63-9930-42ca-83ff-fc116eeacf1d" containerName="mariadb-account-create" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.756212 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef0c3332-e2c0-4e02-932d-ce49c5946ce4" containerName="dnsmasq-dns" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.759423 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2sxpw-config-nsj7m"] Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.759793 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.763753 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.780913 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-287qb" podStartSLOduration=2.8464545980000002 podStartE2EDuration="8.78088314s" podCreationTimestamp="2025-11-21 14:23:24 +0000 UTC" firstStartedPulling="2025-11-21 14:23:25.401071747 +0000 UTC m=+1196.053271006" lastFinishedPulling="2025-11-21 14:23:31.335500289 +0000 UTC m=+1201.987699548" observedRunningTime="2025-11-21 14:23:32.756440228 +0000 UTC m=+1203.408639477" watchObservedRunningTime="2025-11-21 14:23:32.78088314 +0000 UTC m=+1203.433082389" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.827913 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-6wkpn"] Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.829666 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.837460 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.837557 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-487jw" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.859947 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78bc665c87-n2whg"] Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.890235 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-run-ovn\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.890328 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d288z\" (UniqueName: \"kubernetes.io/projected/de865f32-866b-4e4f-8b90-a13641b5cfec-kube-api-access-d288z\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.890393 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de865f32-866b-4e4f-8b90-a13641b5cfec-scripts\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.890428 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-run\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.890460 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" 
(UniqueName: \"kubernetes.io/configmap/de865f32-866b-4e4f-8b90-a13641b5cfec-additional-scripts\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.890494 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-log-ovn\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.894891 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-6wkpn"] Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.908807 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78bc665c87-n2whg"] Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.992445 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8jbv\" (UniqueName: \"kubernetes.io/projected/e3386949-a3f5-453c-953e-8deedb418d28-kube-api-access-r8jbv\") pod \"glance-db-sync-6wkpn\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.992544 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-run-ovn\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.992575 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-db-sync-config-data\") pod \"glance-db-sync-6wkpn\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.992615 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d288z\" (UniqueName: \"kubernetes.io/projected/de865f32-866b-4e4f-8b90-a13641b5cfec-kube-api-access-d288z\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.992680 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-combined-ca-bundle\") pod \"glance-db-sync-6wkpn\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.992713 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de865f32-866b-4e4f-8b90-a13641b5cfec-scripts\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.992754 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-run\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.992782 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/de865f32-866b-4e4f-8b90-a13641b5cfec-additional-scripts\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.992813 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-log-ovn\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.992860 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-config-data\") pod \"glance-db-sync-6wkpn\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.993022 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-run-ovn\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.993363 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-run\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.993458 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-log-ovn\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.994072 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/de865f32-866b-4e4f-8b90-a13641b5cfec-additional-scripts\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:32 crc kubenswrapper[4774]: I1121 14:23:32.995671 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de865f32-866b-4e4f-8b90-a13641b5cfec-scripts\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.016692 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d288z\" (UniqueName: 
\"kubernetes.io/projected/de865f32-866b-4e4f-8b90-a13641b5cfec-kube-api-access-d288z\") pod \"ovn-controller-2sxpw-config-nsj7m\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.094620 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8jbv\" (UniqueName: \"kubernetes.io/projected/e3386949-a3f5-453c-953e-8deedb418d28-kube-api-access-r8jbv\") pod \"glance-db-sync-6wkpn\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.094732 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-db-sync-config-data\") pod \"glance-db-sync-6wkpn\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.094950 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-combined-ca-bundle\") pod \"glance-db-sync-6wkpn\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.095203 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-config-data\") pod \"glance-db-sync-6wkpn\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.099519 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-combined-ca-bundle\") pod \"glance-db-sync-6wkpn\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.099610 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-config-data\") pod \"glance-db-sync-6wkpn\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.100711 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-db-sync-config-data\") pod \"glance-db-sync-6wkpn\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.113983 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8jbv\" (UniqueName: \"kubernetes.io/projected/e3386949-a3f5-453c-953e-8deedb418d28-kube-api-access-r8jbv\") pod \"glance-db-sync-6wkpn\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.119030 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.162328 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-6wkpn" Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.600886 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-2sxpw-config-nsj7m"] Nov 21 14:23:33 crc kubenswrapper[4774]: I1121 14:23:33.663401 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sxpw-config-nsj7m" event={"ID":"de865f32-866b-4e4f-8b90-a13641b5cfec","Type":"ContainerStarted","Data":"cbf5df3ccab0e24a2184cff7fd9375630b2ef642bfbdbb40733e166044b4b3af"} Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.073296 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-6wkpn"] Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.114324 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef0c3332-e2c0-4e02-932d-ce49c5946ce4" path="/var/lib/kubelet/pods/ef0c3332-e2c0-4e02-932d-ce49c5946ce4/volumes" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.276404 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9pp78" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.344529 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab5c0068-d61b-4d09-8632-70a5b637910c-operator-scripts\") pod \"ab5c0068-d61b-4d09-8632-70a5b637910c\" (UID: \"ab5c0068-d61b-4d09-8632-70a5b637910c\") " Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.344634 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djg5p\" (UniqueName: \"kubernetes.io/projected/ab5c0068-d61b-4d09-8632-70a5b637910c-kube-api-access-djg5p\") pod \"ab5c0068-d61b-4d09-8632-70a5b637910c\" (UID: \"ab5c0068-d61b-4d09-8632-70a5b637910c\") " Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.345444 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab5c0068-d61b-4d09-8632-70a5b637910c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ab5c0068-d61b-4d09-8632-70a5b637910c" (UID: "ab5c0068-d61b-4d09-8632-70a5b637910c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.371336 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab5c0068-d61b-4d09-8632-70a5b637910c-kube-api-access-djg5p" (OuterVolumeSpecName: "kube-api-access-djg5p") pod "ab5c0068-d61b-4d09-8632-70a5b637910c" (UID: "ab5c0068-d61b-4d09-8632-70a5b637910c"). InnerVolumeSpecName "kube-api-access-djg5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.427957 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-2802-account-create-tkf4w" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.449623 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f7e56cf-c53d-4d3e-8e76-a8de6556546b-operator-scripts\") pod \"4f7e56cf-c53d-4d3e-8e76-a8de6556546b\" (UID: \"4f7e56cf-c53d-4d3e-8e76-a8de6556546b\") " Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.450238 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxt4w\" (UniqueName: \"kubernetes.io/projected/4f7e56cf-c53d-4d3e-8e76-a8de6556546b-kube-api-access-jxt4w\") pod \"4f7e56cf-c53d-4d3e-8e76-a8de6556546b\" (UID: \"4f7e56cf-c53d-4d3e-8e76-a8de6556546b\") " Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.451032 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djg5p\" (UniqueName: \"kubernetes.io/projected/ab5c0068-d61b-4d09-8632-70a5b637910c-kube-api-access-djg5p\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.451124 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab5c0068-d61b-4d09-8632-70a5b637910c-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.451606 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f7e56cf-c53d-4d3e-8e76-a8de6556546b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4f7e56cf-c53d-4d3e-8e76-a8de6556546b" (UID: "4f7e56cf-c53d-4d3e-8e76-a8de6556546b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.463261 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f7e56cf-c53d-4d3e-8e76-a8de6556546b-kube-api-access-jxt4w" (OuterVolumeSpecName: "kube-api-access-jxt4w") pod "4f7e56cf-c53d-4d3e-8e76-a8de6556546b" (UID: "4f7e56cf-c53d-4d3e-8e76-a8de6556546b"). InnerVolumeSpecName "kube-api-access-jxt4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.486950 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-88ef-account-create-hlvhs" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.501574 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-zjwrf" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.552276 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqqgq\" (UniqueName: \"kubernetes.io/projected/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e-kube-api-access-zqqgq\") pod \"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e\" (UID: \"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e\") " Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.552546 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0447367-db69-44ff-8077-29ac2c200dbf-operator-scripts\") pod \"e0447367-db69-44ff-8077-29ac2c200dbf\" (UID: \"e0447367-db69-44ff-8077-29ac2c200dbf\") " Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.552590 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pbzg\" (UniqueName: \"kubernetes.io/projected/e0447367-db69-44ff-8077-29ac2c200dbf-kube-api-access-2pbzg\") pod \"e0447367-db69-44ff-8077-29ac2c200dbf\" (UID: \"e0447367-db69-44ff-8077-29ac2c200dbf\") " Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.552723 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e-operator-scripts\") pod \"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e\" (UID: \"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e\") " Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.553558 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxt4w\" (UniqueName: \"kubernetes.io/projected/4f7e56cf-c53d-4d3e-8e76-a8de6556546b-kube-api-access-jxt4w\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.553589 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f7e56cf-c53d-4d3e-8e76-a8de6556546b-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.554740 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0447367-db69-44ff-8077-29ac2c200dbf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e0447367-db69-44ff-8077-29ac2c200dbf" (UID: "e0447367-db69-44ff-8077-29ac2c200dbf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.555755 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "71c4ef14-3bfc-4cb6-806a-a864a67fdf7e" (UID: "71c4ef14-3bfc-4cb6-806a-a864a67fdf7e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.561997 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0447367-db69-44ff-8077-29ac2c200dbf-kube-api-access-2pbzg" (OuterVolumeSpecName: "kube-api-access-2pbzg") pod "e0447367-db69-44ff-8077-29ac2c200dbf" (UID: "e0447367-db69-44ff-8077-29ac2c200dbf"). InnerVolumeSpecName "kube-api-access-2pbzg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.562685 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e-kube-api-access-zqqgq" (OuterVolumeSpecName: "kube-api-access-zqqgq") pod "71c4ef14-3bfc-4cb6-806a-a864a67fdf7e" (UID: "71c4ef14-3bfc-4cb6-806a-a864a67fdf7e"). InnerVolumeSpecName "kube-api-access-zqqgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.658123 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0447367-db69-44ff-8077-29ac2c200dbf-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.658189 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pbzg\" (UniqueName: \"kubernetes.io/projected/e0447367-db69-44ff-8077-29ac2c200dbf-kube-api-access-2pbzg\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.658208 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.658223 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqqgq\" (UniqueName: \"kubernetes.io/projected/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e-kube-api-access-zqqgq\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.680874 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-88ef-account-create-hlvhs" event={"ID":"71c4ef14-3bfc-4cb6-806a-a864a67fdf7e","Type":"ContainerDied","Data":"0233bb3866d3ea56c86dd88162d603c67d60d5e70c0b00b0a5c1c83f4cf24aee"} Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.680950 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0233bb3866d3ea56c86dd88162d603c67d60d5e70c0b00b0a5c1c83f4cf24aee" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.681043 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-88ef-account-create-hlvhs" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.686737 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6wkpn" event={"ID":"e3386949-a3f5-453c-953e-8deedb418d28","Type":"ContainerStarted","Data":"bffec3a69e8046d4c85de9307c27120b5fe8685f4e62837ccc08b099358372d2"} Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.688766 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-2802-account-create-tkf4w" event={"ID":"4f7e56cf-c53d-4d3e-8e76-a8de6556546b","Type":"ContainerDied","Data":"9923d6f4d9ee15b3f9b0040fd5ce454ba355f81022fb99e0ff3a6455ee2c0506"} Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.688804 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9923d6f4d9ee15b3f9b0040fd5ce454ba355f81022fb99e0ff3a6455ee2c0506" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.688898 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-2802-account-create-tkf4w" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.690644 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9pp78" event={"ID":"ab5c0068-d61b-4d09-8632-70a5b637910c","Type":"ContainerDied","Data":"80f2a969407f4b46ad1554a512252c5eb28ebbf31b33110a8e82a746caf0d715"} Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.690708 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80f2a969407f4b46ad1554a512252c5eb28ebbf31b33110a8e82a746caf0d715" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.690808 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9pp78" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.697683 4774 generic.go:334] "Generic (PLEG): container finished" podID="de865f32-866b-4e4f-8b90-a13641b5cfec" containerID="3779ec4405b00452ac39136a499c21798d1756a48805e3682c76d529cb3c3f65" exitCode=0 Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.697794 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sxpw-config-nsj7m" event={"ID":"de865f32-866b-4e4f-8b90-a13641b5cfec","Type":"ContainerDied","Data":"3779ec4405b00452ac39136a499c21798d1756a48805e3682c76d529cb3c3f65"} Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.700538 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-zjwrf" event={"ID":"e0447367-db69-44ff-8077-29ac2c200dbf","Type":"ContainerDied","Data":"8894be006d3ebfbdc32aa9c8e31cf8819626bd667468bfb846cc8c7e33476078"} Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.700607 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8894be006d3ebfbdc32aa9c8e31cf8819626bd667468bfb846cc8c7e33476078" Nov 21 14:23:34 crc kubenswrapper[4774]: I1121 14:23:34.700687 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-zjwrf" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.159300 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.210207 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d288z\" (UniqueName: \"kubernetes.io/projected/de865f32-866b-4e4f-8b90-a13641b5cfec-kube-api-access-d288z\") pod \"de865f32-866b-4e4f-8b90-a13641b5cfec\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.210364 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-run-ovn\") pod \"de865f32-866b-4e4f-8b90-a13641b5cfec\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.210409 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de865f32-866b-4e4f-8b90-a13641b5cfec-scripts\") pod \"de865f32-866b-4e4f-8b90-a13641b5cfec\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.210456 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/de865f32-866b-4e4f-8b90-a13641b5cfec-additional-scripts\") pod \"de865f32-866b-4e4f-8b90-a13641b5cfec\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.210500 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-run\") pod \"de865f32-866b-4e4f-8b90-a13641b5cfec\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.210540 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-log-ovn\") pod \"de865f32-866b-4e4f-8b90-a13641b5cfec\" (UID: \"de865f32-866b-4e4f-8b90-a13641b5cfec\") " Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.212005 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "de865f32-866b-4e4f-8b90-a13641b5cfec" (UID: "de865f32-866b-4e4f-8b90-a13641b5cfec"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.212076 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-run" (OuterVolumeSpecName: "var-run") pod "de865f32-866b-4e4f-8b90-a13641b5cfec" (UID: "de865f32-866b-4e4f-8b90-a13641b5cfec"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.212753 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de865f32-866b-4e4f-8b90-a13641b5cfec-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "de865f32-866b-4e4f-8b90-a13641b5cfec" (UID: "de865f32-866b-4e4f-8b90-a13641b5cfec"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.212873 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de865f32-866b-4e4f-8b90-a13641b5cfec-scripts" (OuterVolumeSpecName: "scripts") pod "de865f32-866b-4e4f-8b90-a13641b5cfec" (UID: "de865f32-866b-4e4f-8b90-a13641b5cfec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.212945 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "de865f32-866b-4e4f-8b90-a13641b5cfec" (UID: "de865f32-866b-4e4f-8b90-a13641b5cfec"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.233021 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de865f32-866b-4e4f-8b90-a13641b5cfec-kube-api-access-d288z" (OuterVolumeSpecName: "kube-api-access-d288z") pod "de865f32-866b-4e4f-8b90-a13641b5cfec" (UID: "de865f32-866b-4e4f-8b90-a13641b5cfec"). InnerVolumeSpecName "kube-api-access-d288z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.313152 4774 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.313205 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/de865f32-866b-4e4f-8b90-a13641b5cfec-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.313219 4774 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/de865f32-866b-4e4f-8b90-a13641b5cfec-additional-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.313234 4774 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-run\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.313246 4774 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/de865f32-866b-4e4f-8b90-a13641b5cfec-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.313258 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d288z\" (UniqueName: \"kubernetes.io/projected/de865f32-866b-4e4f-8b90-a13641b5cfec-kube-api-access-d288z\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.619514 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:36 crc kubenswrapper[4774]: E1121 14:23:36.619782 4774 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 21 14:23:36 crc kubenswrapper[4774]: E1121 14:23:36.619851 
4774 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Nov 21 14:23:36 crc kubenswrapper[4774]: E1121 14:23:36.619916 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift podName:6cde8d60-bdf9-405f-8991-5c1f55b0ee76 nodeName:}" failed. No retries permitted until 2025-11-21 14:23:52.619897595 +0000 UTC m=+1223.272096854 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift") pod "swift-storage-0" (UID: "6cde8d60-bdf9-405f-8991-5c1f55b0ee76") : configmap "swift-ring-files" not found Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.726883 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sxpw-config-nsj7m" event={"ID":"de865f32-866b-4e4f-8b90-a13641b5cfec","Type":"ContainerDied","Data":"cbf5df3ccab0e24a2184cff7fd9375630b2ef642bfbdbb40733e166044b4b3af"} Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.726956 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbf5df3ccab0e24a2184cff7fd9375630b2ef642bfbdbb40733e166044b4b3af" Nov 21 14:23:36 crc kubenswrapper[4774]: I1121 14:23:36.727050 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2sxpw-config-nsj7m" Nov 21 14:23:37 crc kubenswrapper[4774]: I1121 14:23:37.303903 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-2sxpw-config-nsj7m"] Nov 21 14:23:37 crc kubenswrapper[4774]: I1121 14:23:37.318434 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-2sxpw-config-nsj7m"] Nov 21 14:23:37 crc kubenswrapper[4774]: E1121 14:23:37.452530 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64e33a39_c371_477f_b1c9_d58189db4bc8.slice/crio-c8583eef8a391a28ea2dc5e764d94e0aa5490a82e94adc85f543fff3c67bdb93.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2685b76_2150_4209_a55b_a989ae40b7db.slice/crio-d28eebf85b23a893614d02f00de474df7cb0032d8a129eb8f057b60aeb7a3b5d.scope\": RecentStats: unable to find data in memory cache]" Nov 21 14:23:37 crc kubenswrapper[4774]: I1121 14:23:37.664641 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-2sxpw" Nov 21 14:23:37 crc kubenswrapper[4774]: I1121 14:23:37.739619 4774 generic.go:334] "Generic (PLEG): container finished" podID="64e33a39-c371-477f-b1c9-d58189db4bc8" containerID="c8583eef8a391a28ea2dc5e764d94e0aa5490a82e94adc85f543fff3c67bdb93" exitCode=0 Nov 21 14:23:37 crc kubenswrapper[4774]: I1121 14:23:37.739715 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"64e33a39-c371-477f-b1c9-d58189db4bc8","Type":"ContainerDied","Data":"c8583eef8a391a28ea2dc5e764d94e0aa5490a82e94adc85f543fff3c67bdb93"} Nov 21 14:23:37 crc kubenswrapper[4774]: I1121 14:23:37.745156 4774 generic.go:334] "Generic (PLEG): container finished" podID="e2685b76-2150-4209-a55b-a989ae40b7db" containerID="d28eebf85b23a893614d02f00de474df7cb0032d8a129eb8f057b60aeb7a3b5d" exitCode=0 Nov 21 14:23:37 crc kubenswrapper[4774]: I1121 14:23:37.745198 
4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2685b76-2150-4209-a55b-a989ae40b7db","Type":"ContainerDied","Data":"d28eebf85b23a893614d02f00de474df7cb0032d8a129eb8f057b60aeb7a3b5d"} Nov 21 14:23:38 crc kubenswrapper[4774]: I1121 14:23:38.111650 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de865f32-866b-4e4f-8b90-a13641b5cfec" path="/var/lib/kubelet/pods/de865f32-866b-4e4f-8b90-a13641b5cfec/volumes" Nov 21 14:23:38 crc kubenswrapper[4774]: I1121 14:23:38.778604 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2685b76-2150-4209-a55b-a989ae40b7db","Type":"ContainerStarted","Data":"6bc151b541f61d18fa6a5bd4d47d620e359bf3af5784a910604388f99bb6a180"} Nov 21 14:23:38 crc kubenswrapper[4774]: I1121 14:23:38.780288 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Nov 21 14:23:38 crc kubenswrapper[4774]: I1121 14:23:38.799957 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"64e33a39-c371-477f-b1c9-d58189db4bc8","Type":"ContainerStarted","Data":"e113bb91e61fb20bd55da6f381dd07a86f741c04641af203c9cd800b9d16d231"} Nov 21 14:23:38 crc kubenswrapper[4774]: I1121 14:23:38.801093 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:23:38 crc kubenswrapper[4774]: I1121 14:23:38.818344 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.68848086 podStartE2EDuration="1m26.818319459s" podCreationTimestamp="2025-11-21 14:22:12 +0000 UTC" firstStartedPulling="2025-11-21 14:22:14.423375319 +0000 UTC m=+1125.075574578" lastFinishedPulling="2025-11-21 14:23:03.553213928 +0000 UTC m=+1174.205413177" observedRunningTime="2025-11-21 14:23:38.81417509 +0000 UTC m=+1209.466374359" watchObservedRunningTime="2025-11-21 14:23:38.818319459 +0000 UTC m=+1209.470518718" Nov 21 14:23:38 crc kubenswrapper[4774]: I1121 14:23:38.855845 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.347568242 podStartE2EDuration="1m25.855798235s" podCreationTimestamp="2025-11-21 14:22:13 +0000 UTC" firstStartedPulling="2025-11-21 14:22:15.077646403 +0000 UTC m=+1125.729845662" lastFinishedPulling="2025-11-21 14:23:03.585876396 +0000 UTC m=+1174.238075655" observedRunningTime="2025-11-21 14:23:38.846243171 +0000 UTC m=+1209.498442430" watchObservedRunningTime="2025-11-21 14:23:38.855798235 +0000 UTC m=+1209.507997494" Nov 21 14:23:41 crc kubenswrapper[4774]: I1121 14:23:41.834242 4774 generic.go:334] "Generic (PLEG): container finished" podID="f4cff911-3af8-45f1-b86a-d3629217b328" containerID="a63e72ed87ad6e5829063d53d36ef358e0c60f0ac83c22a939e23b0679064a2e" exitCode=0 Nov 21 14:23:41 crc kubenswrapper[4774]: I1121 14:23:41.834706 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-287qb" event={"ID":"f4cff911-3af8-45f1-b86a-d3629217b328","Type":"ContainerDied","Data":"a63e72ed87ad6e5829063d53d36ef358e0c60f0ac83c22a939e23b0679064a2e"} Nov 21 14:23:49 crc kubenswrapper[4774]: E1121 14:23:49.920099 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-glance-api@sha256:8c7ecaaf282fb3dd419c02a3e017d5f190e1e0831965f1ce366b9763700b4e4a" Nov 21 14:23:49 crc kubenswrapper[4774]: E1121 14:23:49.920978 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api@sha256:8c7ecaaf282fb3dd419c02a3e017d5f190e1e0831965f1ce366b9763700b4e4a,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r8jbv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-6wkpn_openstack(e3386949-a3f5-453c-953e-8deedb418d28): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 14:23:49 crc kubenswrapper[4774]: E1121 14:23:49.923193 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-6wkpn" podUID="e3386949-a3f5-453c-953e-8deedb418d28" Nov 21 14:23:49 crc kubenswrapper[4774]: I1121 14:23:49.923257 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-287qb" event={"ID":"f4cff911-3af8-45f1-b86a-d3629217b328","Type":"ContainerDied","Data":"466c5248f27112b759325850b15b683ce65d6679f7f4e603b326e14afa4d2864"} Nov 21 14:23:49 crc kubenswrapper[4774]: I1121 14:23:49.923405 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="466c5248f27112b759325850b15b683ce65d6679f7f4e603b326e14afa4d2864" Nov 21 14:23:49 crc kubenswrapper[4774]: I1121 14:23:49.959323 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.016761 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-swiftconf\") pod \"f4cff911-3af8-45f1-b86a-d3629217b328\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.016946 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f4cff911-3af8-45f1-b86a-d3629217b328-ring-data-devices\") pod \"f4cff911-3af8-45f1-b86a-d3629217b328\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.016975 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4cff911-3af8-45f1-b86a-d3629217b328-scripts\") pod \"f4cff911-3af8-45f1-b86a-d3629217b328\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.017013 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-dispersionconf\") pod \"f4cff911-3af8-45f1-b86a-d3629217b328\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.017071 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhsh2\" (UniqueName: \"kubernetes.io/projected/f4cff911-3af8-45f1-b86a-d3629217b328-kube-api-access-zhsh2\") pod \"f4cff911-3af8-45f1-b86a-d3629217b328\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.017166 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-combined-ca-bundle\") pod \"f4cff911-3af8-45f1-b86a-d3629217b328\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.017187 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f4cff911-3af8-45f1-b86a-d3629217b328-etc-swift\") pod \"f4cff911-3af8-45f1-b86a-d3629217b328\" (UID: \"f4cff911-3af8-45f1-b86a-d3629217b328\") " Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.018313 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4cff911-3af8-45f1-b86a-d3629217b328-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "f4cff911-3af8-45f1-b86a-d3629217b328" (UID: "f4cff911-3af8-45f1-b86a-d3629217b328"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.018560 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4cff911-3af8-45f1-b86a-d3629217b328-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "f4cff911-3af8-45f1-b86a-d3629217b328" (UID: "f4cff911-3af8-45f1-b86a-d3629217b328"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.025243 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4cff911-3af8-45f1-b86a-d3629217b328-kube-api-access-zhsh2" (OuterVolumeSpecName: "kube-api-access-zhsh2") pod "f4cff911-3af8-45f1-b86a-d3629217b328" (UID: "f4cff911-3af8-45f1-b86a-d3629217b328"). InnerVolumeSpecName "kube-api-access-zhsh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.031876 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "f4cff911-3af8-45f1-b86a-d3629217b328" (UID: "f4cff911-3af8-45f1-b86a-d3629217b328"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.042419 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4cff911-3af8-45f1-b86a-d3629217b328-scripts" (OuterVolumeSpecName: "scripts") pod "f4cff911-3af8-45f1-b86a-d3629217b328" (UID: "f4cff911-3af8-45f1-b86a-d3629217b328"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.047791 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "f4cff911-3af8-45f1-b86a-d3629217b328" (UID: "f4cff911-3af8-45f1-b86a-d3629217b328"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.060456 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4cff911-3af8-45f1-b86a-d3629217b328" (UID: "f4cff911-3af8-45f1-b86a-d3629217b328"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.120137 4774 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f4cff911-3af8-45f1-b86a-d3629217b328-ring-data-devices\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.120177 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f4cff911-3af8-45f1-b86a-d3629217b328-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.120188 4774 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-dispersionconf\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.120199 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhsh2\" (UniqueName: \"kubernetes.io/projected/f4cff911-3af8-45f1-b86a-d3629217b328-kube-api-access-zhsh2\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.120211 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.120221 4774 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f4cff911-3af8-45f1-b86a-d3629217b328-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.120235 4774 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f4cff911-3af8-45f1-b86a-d3629217b328-swiftconf\") on node \"crc\" DevicePath \"\"" Nov 21 14:23:50 crc kubenswrapper[4774]: I1121 14:23:50.933399 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-287qb" Nov 21 14:23:50 crc kubenswrapper[4774]: E1121 14:23:50.937507 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api@sha256:8c7ecaaf282fb3dd419c02a3e017d5f190e1e0831965f1ce366b9763700b4e4a\\\"\"" pod="openstack/glance-db-sync-6wkpn" podUID="e3386949-a3f5-453c-953e-8deedb418d28" Nov 21 14:23:52 crc kubenswrapper[4774]: I1121 14:23:52.675854 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:52 crc kubenswrapper[4774]: I1121 14:23:52.696329 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift\") pod \"swift-storage-0\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " pod="openstack/swift-storage-0" Nov 21 14:23:52 crc kubenswrapper[4774]: I1121 14:23:52.879004 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Nov 21 14:23:53 crc kubenswrapper[4774]: I1121 14:23:53.444389 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Nov 21 14:23:53 crc kubenswrapper[4774]: I1121 14:23:53.776088 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Nov 21 14:23:53 crc kubenswrapper[4774]: I1121 14:23:53.964095 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"a49286afe01eea3dff82dd38e3ea2a2c9e9de8138caa4af5df5fc4cbab4325a5"} Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.160458 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-29l4m"] Nov 21 14:23:54 crc kubenswrapper[4774]: E1121 14:23:54.161395 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0447367-db69-44ff-8077-29ac2c200dbf" containerName="mariadb-database-create" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.161421 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0447367-db69-44ff-8077-29ac2c200dbf" containerName="mariadb-database-create" Nov 21 14:23:54 crc kubenswrapper[4774]: E1121 14:23:54.161439 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab5c0068-d61b-4d09-8632-70a5b637910c" containerName="mariadb-database-create" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.161450 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab5c0068-d61b-4d09-8632-70a5b637910c" containerName="mariadb-database-create" Nov 21 14:23:54 crc kubenswrapper[4774]: E1121 14:23:54.161479 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71c4ef14-3bfc-4cb6-806a-a864a67fdf7e" containerName="mariadb-account-create" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.161488 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="71c4ef14-3bfc-4cb6-806a-a864a67fdf7e" containerName="mariadb-account-create" Nov 21 14:23:54 crc kubenswrapper[4774]: E1121 14:23:54.161507 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7e56cf-c53d-4d3e-8e76-a8de6556546b" containerName="mariadb-account-create" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.161516 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7e56cf-c53d-4d3e-8e76-a8de6556546b" containerName="mariadb-account-create" Nov 21 14:23:54 crc kubenswrapper[4774]: E1121 14:23:54.161547 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de865f32-866b-4e4f-8b90-a13641b5cfec" containerName="ovn-config" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.161556 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="de865f32-866b-4e4f-8b90-a13641b5cfec" containerName="ovn-config" Nov 21 14:23:54 crc kubenswrapper[4774]: E1121 14:23:54.161577 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4cff911-3af8-45f1-b86a-d3629217b328" containerName="swift-ring-rebalance" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.161587 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4cff911-3af8-45f1-b86a-d3629217b328" containerName="swift-ring-rebalance" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.161873 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f7e56cf-c53d-4d3e-8e76-a8de6556546b" containerName="mariadb-account-create" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.161900 
4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4cff911-3af8-45f1-b86a-d3629217b328" containerName="swift-ring-rebalance" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.161920 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="de865f32-866b-4e4f-8b90-a13641b5cfec" containerName="ovn-config" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.161938 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="71c4ef14-3bfc-4cb6-806a-a864a67fdf7e" containerName="mariadb-account-create" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.161953 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab5c0068-d61b-4d09-8632-70a5b637910c" containerName="mariadb-database-create" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.161963 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0447367-db69-44ff-8077-29ac2c200dbf" containerName="mariadb-database-create" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.162799 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-29l4m" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.178695 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-29l4m"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.234654 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m94d\" (UniqueName: \"kubernetes.io/projected/c0e7f310-44e3-41ea-b143-cc1074c854a6-kube-api-access-2m94d\") pod \"cinder-db-create-29l4m\" (UID: \"c0e7f310-44e3-41ea-b143-cc1074c854a6\") " pod="openstack/cinder-db-create-29l4m" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.235004 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0e7f310-44e3-41ea-b143-cc1074c854a6-operator-scripts\") pod \"cinder-db-create-29l4m\" (UID: \"c0e7f310-44e3-41ea-b143-cc1074c854a6\") " pod="openstack/cinder-db-create-29l4m" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.299518 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-j6zr8"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.305124 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-j6zr8" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.308743 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-j6zr8"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.340012 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m94d\" (UniqueName: \"kubernetes.io/projected/c0e7f310-44e3-41ea-b143-cc1074c854a6-kube-api-access-2m94d\") pod \"cinder-db-create-29l4m\" (UID: \"c0e7f310-44e3-41ea-b143-cc1074c854a6\") " pod="openstack/cinder-db-create-29l4m" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.340159 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0e7f310-44e3-41ea-b143-cc1074c854a6-operator-scripts\") pod \"cinder-db-create-29l4m\" (UID: \"c0e7f310-44e3-41ea-b143-cc1074c854a6\") " pod="openstack/cinder-db-create-29l4m" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.341098 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0e7f310-44e3-41ea-b143-cc1074c854a6-operator-scripts\") pod \"cinder-db-create-29l4m\" (UID: \"c0e7f310-44e3-41ea-b143-cc1074c854a6\") " pod="openstack/cinder-db-create-29l4m" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.375515 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-5822-account-create-7wb8z"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.383810 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5822-account-create-7wb8z" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.387254 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.394449 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5822-account-create-7wb8z"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.402238 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m94d\" (UniqueName: \"kubernetes.io/projected/c0e7f310-44e3-41ea-b143-cc1074c854a6-kube-api-access-2m94d\") pod \"cinder-db-create-29l4m\" (UID: \"c0e7f310-44e3-41ea-b143-cc1074c854a6\") " pod="openstack/cinder-db-create-29l4m" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.442165 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/874bb9d0-cbc9-4158-928d-8d6267fa02ab-operator-scripts\") pod \"barbican-db-create-j6zr8\" (UID: \"874bb9d0-cbc9-4158-928d-8d6267fa02ab\") " pod="openstack/barbican-db-create-j6zr8" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.442284 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkvwp\" (UniqueName: \"kubernetes.io/projected/874bb9d0-cbc9-4158-928d-8d6267fa02ab-kube-api-access-jkvwp\") pod \"barbican-db-create-j6zr8\" (UID: \"874bb9d0-cbc9-4158-928d-8d6267fa02ab\") " pod="openstack/barbican-db-create-j6zr8" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.454619 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.517803 4774 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/keystone-db-sync-mpj9v"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.520116 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.527273 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-lxmk4" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.527569 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-29l4m" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.540226 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.540558 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.540735 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.545299 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8f971e8a-d223-4b26-860a-b2ea8f3d545f-operator-scripts\") pod \"cinder-5822-account-create-7wb8z\" (UID: \"8f971e8a-d223-4b26-860a-b2ea8f3d545f\") " pod="openstack/cinder-5822-account-create-7wb8z" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.545442 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkvwp\" (UniqueName: \"kubernetes.io/projected/874bb9d0-cbc9-4158-928d-8d6267fa02ab-kube-api-access-jkvwp\") pod \"barbican-db-create-j6zr8\" (UID: \"874bb9d0-cbc9-4158-928d-8d6267fa02ab\") " pod="openstack/barbican-db-create-j6zr8" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.545735 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5w86\" (UniqueName: \"kubernetes.io/projected/8f971e8a-d223-4b26-860a-b2ea8f3d545f-kube-api-access-j5w86\") pod \"cinder-5822-account-create-7wb8z\" (UID: \"8f971e8a-d223-4b26-860a-b2ea8f3d545f\") " pod="openstack/cinder-5822-account-create-7wb8z" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.545785 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/874bb9d0-cbc9-4158-928d-8d6267fa02ab-operator-scripts\") pod \"barbican-db-create-j6zr8\" (UID: \"874bb9d0-cbc9-4158-928d-8d6267fa02ab\") " pod="openstack/barbican-db-create-j6zr8" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.546633 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/874bb9d0-cbc9-4158-928d-8d6267fa02ab-operator-scripts\") pod \"barbican-db-create-j6zr8\" (UID: \"874bb9d0-cbc9-4158-928d-8d6267fa02ab\") " pod="openstack/barbican-db-create-j6zr8" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.550788 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-6523-account-create-4fm5p"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.552189 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6523-account-create-4fm5p" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.568233 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.568232 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-mpj9v"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.588552 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6523-account-create-4fm5p"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.636637 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkvwp\" (UniqueName: \"kubernetes.io/projected/874bb9d0-cbc9-4158-928d-8d6267fa02ab-kube-api-access-jkvwp\") pod \"barbican-db-create-j6zr8\" (UID: \"874bb9d0-cbc9-4158-928d-8d6267fa02ab\") " pod="openstack/barbican-db-create-j6zr8" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.653241 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5w86\" (UniqueName: \"kubernetes.io/projected/8f971e8a-d223-4b26-860a-b2ea8f3d545f-kube-api-access-j5w86\") pod \"cinder-5822-account-create-7wb8z\" (UID: \"8f971e8a-d223-4b26-860a-b2ea8f3d545f\") " pod="openstack/cinder-5822-account-create-7wb8z" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.653287 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/596ae3ef-3b18-4646-8b3c-34b6db752b22-operator-scripts\") pod \"barbican-6523-account-create-4fm5p\" (UID: \"596ae3ef-3b18-4646-8b3c-34b6db752b22\") " pod="openstack/barbican-6523-account-create-4fm5p" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.653338 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8f971e8a-d223-4b26-860a-b2ea8f3d545f-operator-scripts\") pod \"cinder-5822-account-create-7wb8z\" (UID: \"8f971e8a-d223-4b26-860a-b2ea8f3d545f\") " pod="openstack/cinder-5822-account-create-7wb8z" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.653373 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df167c70-6bf6-4221-ac4d-fe967e1abaac-config-data\") pod \"keystone-db-sync-mpj9v\" (UID: \"df167c70-6bf6-4221-ac4d-fe967e1abaac\") " pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.653413 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmv6p\" (UniqueName: \"kubernetes.io/projected/df167c70-6bf6-4221-ac4d-fe967e1abaac-kube-api-access-bmv6p\") pod \"keystone-db-sync-mpj9v\" (UID: \"df167c70-6bf6-4221-ac4d-fe967e1abaac\") " pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.653456 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65vn9\" (UniqueName: \"kubernetes.io/projected/596ae3ef-3b18-4646-8b3c-34b6db752b22-kube-api-access-65vn9\") pod \"barbican-6523-account-create-4fm5p\" (UID: \"596ae3ef-3b18-4646-8b3c-34b6db752b22\") " pod="openstack/barbican-6523-account-create-4fm5p" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.653496 4774 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df167c70-6bf6-4221-ac4d-fe967e1abaac-combined-ca-bundle\") pod \"keystone-db-sync-mpj9v\" (UID: \"df167c70-6bf6-4221-ac4d-fe967e1abaac\") " pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.654600 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8f971e8a-d223-4b26-860a-b2ea8f3d545f-operator-scripts\") pod \"cinder-5822-account-create-7wb8z\" (UID: \"8f971e8a-d223-4b26-860a-b2ea8f3d545f\") " pod="openstack/cinder-5822-account-create-7wb8z" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.667013 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-mhzgm"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.668566 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mhzgm" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.700224 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-mhzgm"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.702375 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5w86\" (UniqueName: \"kubernetes.io/projected/8f971e8a-d223-4b26-860a-b2ea8f3d545f-kube-api-access-j5w86\") pod \"cinder-5822-account-create-7wb8z\" (UID: \"8f971e8a-d223-4b26-860a-b2ea8f3d545f\") " pod="openstack/cinder-5822-account-create-7wb8z" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.764634 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5822-account-create-7wb8z" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.765688 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df167c70-6bf6-4221-ac4d-fe967e1abaac-combined-ca-bundle\") pod \"keystone-db-sync-mpj9v\" (UID: \"df167c70-6bf6-4221-ac4d-fe967e1abaac\") " pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.766521 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/596ae3ef-3b18-4646-8b3c-34b6db752b22-operator-scripts\") pod \"barbican-6523-account-create-4fm5p\" (UID: \"596ae3ef-3b18-4646-8b3c-34b6db752b22\") " pod="openstack/barbican-6523-account-create-4fm5p" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.767093 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dvwr\" (UniqueName: \"kubernetes.io/projected/2a52bd28-14b8-4988-a291-6072e60211f3-kube-api-access-6dvwr\") pod \"neutron-db-create-mhzgm\" (UID: \"2a52bd28-14b8-4988-a291-6072e60211f3\") " pod="openstack/neutron-db-create-mhzgm" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.767170 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df167c70-6bf6-4221-ac4d-fe967e1abaac-config-data\") pod \"keystone-db-sync-mpj9v\" (UID: \"df167c70-6bf6-4221-ac4d-fe967e1abaac\") " pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.767339 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmv6p\" (UniqueName: 
\"kubernetes.io/projected/df167c70-6bf6-4221-ac4d-fe967e1abaac-kube-api-access-bmv6p\") pod \"keystone-db-sync-mpj9v\" (UID: \"df167c70-6bf6-4221-ac4d-fe967e1abaac\") " pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.767592 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a52bd28-14b8-4988-a291-6072e60211f3-operator-scripts\") pod \"neutron-db-create-mhzgm\" (UID: \"2a52bd28-14b8-4988-a291-6072e60211f3\") " pod="openstack/neutron-db-create-mhzgm" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.767673 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65vn9\" (UniqueName: \"kubernetes.io/projected/596ae3ef-3b18-4646-8b3c-34b6db752b22-kube-api-access-65vn9\") pod \"barbican-6523-account-create-4fm5p\" (UID: \"596ae3ef-3b18-4646-8b3c-34b6db752b22\") " pod="openstack/barbican-6523-account-create-4fm5p" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.767734 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/596ae3ef-3b18-4646-8b3c-34b6db752b22-operator-scripts\") pod \"barbican-6523-account-create-4fm5p\" (UID: \"596ae3ef-3b18-4646-8b3c-34b6db752b22\") " pod="openstack/barbican-6523-account-create-4fm5p" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.773809 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df167c70-6bf6-4221-ac4d-fe967e1abaac-combined-ca-bundle\") pod \"keystone-db-sync-mpj9v\" (UID: \"df167c70-6bf6-4221-ac4d-fe967e1abaac\") " pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.776630 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df167c70-6bf6-4221-ac4d-fe967e1abaac-config-data\") pod \"keystone-db-sync-mpj9v\" (UID: \"df167c70-6bf6-4221-ac4d-fe967e1abaac\") " pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.822626 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmv6p\" (UniqueName: \"kubernetes.io/projected/df167c70-6bf6-4221-ac4d-fe967e1abaac-kube-api-access-bmv6p\") pod \"keystone-db-sync-mpj9v\" (UID: \"df167c70-6bf6-4221-ac4d-fe967e1abaac\") " pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.834442 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65vn9\" (UniqueName: \"kubernetes.io/projected/596ae3ef-3b18-4646-8b3c-34b6db752b22-kube-api-access-65vn9\") pod \"barbican-6523-account-create-4fm5p\" (UID: \"596ae3ef-3b18-4646-8b3c-34b6db752b22\") " pod="openstack/barbican-6523-account-create-4fm5p" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.848483 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.880584 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dvwr\" (UniqueName: \"kubernetes.io/projected/2a52bd28-14b8-4988-a291-6072e60211f3-kube-api-access-6dvwr\") pod \"neutron-db-create-mhzgm\" (UID: \"2a52bd28-14b8-4988-a291-6072e60211f3\") " pod="openstack/neutron-db-create-mhzgm" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.880664 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a52bd28-14b8-4988-a291-6072e60211f3-operator-scripts\") pod \"neutron-db-create-mhzgm\" (UID: \"2a52bd28-14b8-4988-a291-6072e60211f3\") " pod="openstack/neutron-db-create-mhzgm" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.882282 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a52bd28-14b8-4988-a291-6072e60211f3-operator-scripts\") pod \"neutron-db-create-mhzgm\" (UID: \"2a52bd28-14b8-4988-a291-6072e60211f3\") " pod="openstack/neutron-db-create-mhzgm" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.901696 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-fb24-account-create-tlmv2"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.903117 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-fb24-account-create-tlmv2" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.907775 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.920530 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dvwr\" (UniqueName: \"kubernetes.io/projected/2a52bd28-14b8-4988-a291-6072e60211f3-kube-api-access-6dvwr\") pod \"neutron-db-create-mhzgm\" (UID: \"2a52bd28-14b8-4988-a291-6072e60211f3\") " pod="openstack/neutron-db-create-mhzgm" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.933734 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-j6zr8" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.960229 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-fb24-account-create-tlmv2"] Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.982041 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fed1a3ac-e920-47b6-b864-6f1ec34c0770-operator-scripts\") pod \"neutron-fb24-account-create-tlmv2\" (UID: \"fed1a3ac-e920-47b6-b864-6f1ec34c0770\") " pod="openstack/neutron-fb24-account-create-tlmv2" Nov 21 14:23:54 crc kubenswrapper[4774]: I1121 14:23:54.982127 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq8b9\" (UniqueName: \"kubernetes.io/projected/fed1a3ac-e920-47b6-b864-6f1ec34c0770-kube-api-access-mq8b9\") pod \"neutron-fb24-account-create-tlmv2\" (UID: \"fed1a3ac-e920-47b6-b864-6f1ec34c0770\") " pod="openstack/neutron-fb24-account-create-tlmv2" Nov 21 14:23:55 crc kubenswrapper[4774]: I1121 14:23:55.077911 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6523-account-create-4fm5p" Nov 21 14:23:55 crc kubenswrapper[4774]: I1121 14:23:55.084195 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fed1a3ac-e920-47b6-b864-6f1ec34c0770-operator-scripts\") pod \"neutron-fb24-account-create-tlmv2\" (UID: \"fed1a3ac-e920-47b6-b864-6f1ec34c0770\") " pod="openstack/neutron-fb24-account-create-tlmv2" Nov 21 14:23:55 crc kubenswrapper[4774]: I1121 14:23:55.084266 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq8b9\" (UniqueName: \"kubernetes.io/projected/fed1a3ac-e920-47b6-b864-6f1ec34c0770-kube-api-access-mq8b9\") pod \"neutron-fb24-account-create-tlmv2\" (UID: \"fed1a3ac-e920-47b6-b864-6f1ec34c0770\") " pod="openstack/neutron-fb24-account-create-tlmv2" Nov 21 14:23:55 crc kubenswrapper[4774]: I1121 14:23:55.085573 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fed1a3ac-e920-47b6-b864-6f1ec34c0770-operator-scripts\") pod \"neutron-fb24-account-create-tlmv2\" (UID: \"fed1a3ac-e920-47b6-b864-6f1ec34c0770\") " pod="openstack/neutron-fb24-account-create-tlmv2" Nov 21 14:23:55 crc kubenswrapper[4774]: I1121 14:23:55.108072 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mhzgm" Nov 21 14:23:55 crc kubenswrapper[4774]: I1121 14:23:55.109076 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq8b9\" (UniqueName: \"kubernetes.io/projected/fed1a3ac-e920-47b6-b864-6f1ec34c0770-kube-api-access-mq8b9\") pod \"neutron-fb24-account-create-tlmv2\" (UID: \"fed1a3ac-e920-47b6-b864-6f1ec34c0770\") " pod="openstack/neutron-fb24-account-create-tlmv2" Nov 21 14:23:55 crc kubenswrapper[4774]: I1121 14:23:55.256300 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-fb24-account-create-tlmv2" Nov 21 14:23:55 crc kubenswrapper[4774]: I1121 14:23:55.554513 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-29l4m"] Nov 21 14:23:55 crc kubenswrapper[4774]: I1121 14:23:55.561365 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-mpj9v"] Nov 21 14:23:55 crc kubenswrapper[4774]: W1121 14:23:55.637623 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf167c70_6bf6_4221_ac4d_fe967e1abaac.slice/crio-6c76dc60cf6c0f1b18d4daee225df50d8afcecaf403cbb30946e78e783e75ec4 WatchSource:0}: Error finding container 6c76dc60cf6c0f1b18d4daee225df50d8afcecaf403cbb30946e78e783e75ec4: Status 404 returned error can't find the container with id 6c76dc60cf6c0f1b18d4daee225df50d8afcecaf403cbb30946e78e783e75ec4 Nov 21 14:23:55 crc kubenswrapper[4774]: I1121 14:23:55.756372 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5822-account-create-7wb8z"] Nov 21 14:23:55 crc kubenswrapper[4774]: I1121 14:23:55.787435 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-j6zr8"] Nov 21 14:23:56 crc kubenswrapper[4774]: I1121 14:23:56.046343 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-29l4m" event={"ID":"c0e7f310-44e3-41ea-b143-cc1074c854a6","Type":"ContainerStarted","Data":"ca6fa67812458359cc7c46a1e0a8f631e7f0c2a4fce9e694b0354848c9190f8a"} Nov 21 14:23:56 crc kubenswrapper[4774]: I1121 14:23:56.053054 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mpj9v" event={"ID":"df167c70-6bf6-4221-ac4d-fe967e1abaac","Type":"ContainerStarted","Data":"6c76dc60cf6c0f1b18d4daee225df50d8afcecaf403cbb30946e78e783e75ec4"} Nov 21 14:23:56 crc kubenswrapper[4774]: I1121 14:23:56.063008 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5822-account-create-7wb8z" event={"ID":"8f971e8a-d223-4b26-860a-b2ea8f3d545f","Type":"ContainerStarted","Data":"42ea9a972b4ede7f3f3bdb1743b93ce3ea80e08c13c1e0fd16774eed52a8d310"} Nov 21 14:23:56 crc kubenswrapper[4774]: I1121 14:23:56.064808 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-j6zr8" event={"ID":"874bb9d0-cbc9-4158-928d-8d6267fa02ab","Type":"ContainerStarted","Data":"54b0cd8fb3bd556016af0b55f39061f53aeb3a09791d27f955e23a314788ec5b"} Nov 21 14:23:56 crc kubenswrapper[4774]: I1121 14:23:56.298811 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-6523-account-create-4fm5p"] Nov 21 14:23:56 crc kubenswrapper[4774]: I1121 14:23:56.358076 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-mhzgm"] Nov 21 14:23:56 crc kubenswrapper[4774]: W1121 14:23:56.381789 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a52bd28_14b8_4988_a291_6072e60211f3.slice/crio-94f28c2ce432711409e43d605a54635eeb560bc702f4dfaba2b3dc901538989b WatchSource:0}: Error finding container 94f28c2ce432711409e43d605a54635eeb560bc702f4dfaba2b3dc901538989b: Status 404 returned error can't find the container with id 94f28c2ce432711409e43d605a54635eeb560bc702f4dfaba2b3dc901538989b Nov 21 14:23:56 crc kubenswrapper[4774]: I1121 14:23:56.494505 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-fb24-account-create-tlmv2"] Nov 21 
14:23:56 crc kubenswrapper[4774]: W1121 14:23:56.538732 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfed1a3ac_e920_47b6_b864_6f1ec34c0770.slice/crio-2f381845987d01f3ac56d9127b6a9cae6f2ba052f363bec4c8233579a9305e90 WatchSource:0}: Error finding container 2f381845987d01f3ac56d9127b6a9cae6f2ba052f363bec4c8233579a9305e90: Status 404 returned error can't find the container with id 2f381845987d01f3ac56d9127b6a9cae6f2ba052f363bec4c8233579a9305e90 Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.107185 4774 generic.go:334] "Generic (PLEG): container finished" podID="c0e7f310-44e3-41ea-b143-cc1074c854a6" containerID="01fd58ee9988246dbee8d387c653c96d53a5ed6e239bbaeea7f3c93f959a0ad6" exitCode=0 Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.107430 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-29l4m" event={"ID":"c0e7f310-44e3-41ea-b143-cc1074c854a6","Type":"ContainerDied","Data":"01fd58ee9988246dbee8d387c653c96d53a5ed6e239bbaeea7f3c93f959a0ad6"} Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.134557 4774 generic.go:334] "Generic (PLEG): container finished" podID="8f971e8a-d223-4b26-860a-b2ea8f3d545f" containerID="f5da3006308e36eb20210e041418de5b32da8956f89007e299267104f93eea3b" exitCode=0 Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.135320 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5822-account-create-7wb8z" event={"ID":"8f971e8a-d223-4b26-860a-b2ea8f3d545f","Type":"ContainerDied","Data":"f5da3006308e36eb20210e041418de5b32da8956f89007e299267104f93eea3b"} Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.147837 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fb24-account-create-tlmv2" event={"ID":"fed1a3ac-e920-47b6-b864-6f1ec34c0770","Type":"ContainerStarted","Data":"448856a50b1cbeaf51347f942b94dae57d6ecf3b79598bf3ce4b6f069353bd77"} Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.147908 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fb24-account-create-tlmv2" event={"ID":"fed1a3ac-e920-47b6-b864-6f1ec34c0770","Type":"ContainerStarted","Data":"2f381845987d01f3ac56d9127b6a9cae6f2ba052f363bec4c8233579a9305e90"} Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.153450 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6523-account-create-4fm5p" event={"ID":"596ae3ef-3b18-4646-8b3c-34b6db752b22","Type":"ContainerStarted","Data":"5ebb6d26c1ba872afd7ee0103564de8a6a3cffae676294d1aa9ec716904f0544"} Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.153505 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6523-account-create-4fm5p" event={"ID":"596ae3ef-3b18-4646-8b3c-34b6db752b22","Type":"ContainerStarted","Data":"b3458a27519208c3b2ec9be248e03c7c4e47963369b0f5ba4dce775aee34bbf1"} Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.166631 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mhzgm" event={"ID":"2a52bd28-14b8-4988-a291-6072e60211f3","Type":"ContainerStarted","Data":"029dc3912d84b8c7abbaa6bf0010eb02ce724b0c91e424cdb8dbcc692724db07"} Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.167031 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mhzgm" 
event={"ID":"2a52bd28-14b8-4988-a291-6072e60211f3","Type":"ContainerStarted","Data":"94f28c2ce432711409e43d605a54635eeb560bc702f4dfaba2b3dc901538989b"} Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.169553 4774 generic.go:334] "Generic (PLEG): container finished" podID="874bb9d0-cbc9-4158-928d-8d6267fa02ab" containerID="ae147c874c0b0365082cb788ba5bfc4259836a02272f29d3be8ca3a93c55d41e" exitCode=0 Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.169780 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-j6zr8" event={"ID":"874bb9d0-cbc9-4158-928d-8d6267fa02ab","Type":"ContainerDied","Data":"ae147c874c0b0365082cb788ba5bfc4259836a02272f29d3be8ca3a93c55d41e"} Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.173032 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"4a079344642c9fb3a26394a82468d59daabece732a5466662ee8aeaa883a5bb3"} Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.173173 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"d3b647fd9ca3744848c9bba9996b244e70638e808df6e12566f545983a15f3cc"} Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.173261 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"9bc27234572696e44f557a383a86c888ed805788bafa91dd14bb78cdefab3b32"} Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.198214 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-6523-account-create-4fm5p" podStartSLOduration=3.198179922 podStartE2EDuration="3.198179922s" podCreationTimestamp="2025-11-21 14:23:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:23:57.18592814 +0000 UTC m=+1227.838127419" watchObservedRunningTime="2025-11-21 14:23:57.198179922 +0000 UTC m=+1227.850379181" Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.247402 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-fb24-account-create-tlmv2" podStartSLOduration=3.247374194 podStartE2EDuration="3.247374194s" podCreationTimestamp="2025-11-21 14:23:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:23:57.221431329 +0000 UTC m=+1227.873630588" watchObservedRunningTime="2025-11-21 14:23:57.247374194 +0000 UTC m=+1227.899573453" Nov 21 14:23:57 crc kubenswrapper[4774]: I1121 14:23:57.272215 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-mhzgm" podStartSLOduration=3.272185996 podStartE2EDuration="3.272185996s" podCreationTimestamp="2025-11-21 14:23:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:23:57.260858051 +0000 UTC m=+1227.913057340" watchObservedRunningTime="2025-11-21 14:23:57.272185996 +0000 UTC m=+1227.924385255" Nov 21 14:23:58 crc kubenswrapper[4774]: I1121 14:23:58.191669 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"78cf57423bdba8a0adb0930011b88d9283fb739e1b67a73287f7ff3ca582a4a1"} Nov 21 14:23:58 crc kubenswrapper[4774]: I1121 14:23:58.195020 4774 generic.go:334] "Generic (PLEG): container finished" podID="fed1a3ac-e920-47b6-b864-6f1ec34c0770" containerID="448856a50b1cbeaf51347f942b94dae57d6ecf3b79598bf3ce4b6f069353bd77" exitCode=0 Nov 21 14:23:58 crc kubenswrapper[4774]: I1121 14:23:58.195091 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fb24-account-create-tlmv2" event={"ID":"fed1a3ac-e920-47b6-b864-6f1ec34c0770","Type":"ContainerDied","Data":"448856a50b1cbeaf51347f942b94dae57d6ecf3b79598bf3ce4b6f069353bd77"} Nov 21 14:23:58 crc kubenswrapper[4774]: I1121 14:23:58.205257 4774 generic.go:334] "Generic (PLEG): container finished" podID="596ae3ef-3b18-4646-8b3c-34b6db752b22" containerID="5ebb6d26c1ba872afd7ee0103564de8a6a3cffae676294d1aa9ec716904f0544" exitCode=0 Nov 21 14:23:58 crc kubenswrapper[4774]: I1121 14:23:58.205447 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6523-account-create-4fm5p" event={"ID":"596ae3ef-3b18-4646-8b3c-34b6db752b22","Type":"ContainerDied","Data":"5ebb6d26c1ba872afd7ee0103564de8a6a3cffae676294d1aa9ec716904f0544"} Nov 21 14:23:58 crc kubenswrapper[4774]: I1121 14:23:58.208694 4774 generic.go:334] "Generic (PLEG): container finished" podID="2a52bd28-14b8-4988-a291-6072e60211f3" containerID="029dc3912d84b8c7abbaa6bf0010eb02ce724b0c91e424cdb8dbcc692724db07" exitCode=0 Nov 21 14:23:58 crc kubenswrapper[4774]: I1121 14:23:58.208746 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mhzgm" event={"ID":"2a52bd28-14b8-4988-a291-6072e60211f3","Type":"ContainerDied","Data":"029dc3912d84b8c7abbaa6bf0010eb02ce724b0c91e424cdb8dbcc692724db07"} Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.073126 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5822-account-create-7wb8z" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.085422 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mhzgm" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.100864 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-fb24-account-create-tlmv2" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.116519 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-29l4m" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.130654 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-6523-account-create-4fm5p" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.144213 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-j6zr8" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.148091 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5w86\" (UniqueName: \"kubernetes.io/projected/8f971e8a-d223-4b26-860a-b2ea8f3d545f-kube-api-access-j5w86\") pod \"8f971e8a-d223-4b26-860a-b2ea8f3d545f\" (UID: \"8f971e8a-d223-4b26-860a-b2ea8f3d545f\") " Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.148174 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dvwr\" (UniqueName: \"kubernetes.io/projected/2a52bd28-14b8-4988-a291-6072e60211f3-kube-api-access-6dvwr\") pod \"2a52bd28-14b8-4988-a291-6072e60211f3\" (UID: \"2a52bd28-14b8-4988-a291-6072e60211f3\") " Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.148270 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq8b9\" (UniqueName: \"kubernetes.io/projected/fed1a3ac-e920-47b6-b864-6f1ec34c0770-kube-api-access-mq8b9\") pod \"fed1a3ac-e920-47b6-b864-6f1ec34c0770\" (UID: \"fed1a3ac-e920-47b6-b864-6f1ec34c0770\") " Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.148418 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8f971e8a-d223-4b26-860a-b2ea8f3d545f-operator-scripts\") pod \"8f971e8a-d223-4b26-860a-b2ea8f3d545f\" (UID: \"8f971e8a-d223-4b26-860a-b2ea8f3d545f\") " Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.148485 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fed1a3ac-e920-47b6-b864-6f1ec34c0770-operator-scripts\") pod \"fed1a3ac-e920-47b6-b864-6f1ec34c0770\" (UID: \"fed1a3ac-e920-47b6-b864-6f1ec34c0770\") " Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.148541 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a52bd28-14b8-4988-a291-6072e60211f3-operator-scripts\") pod \"2a52bd28-14b8-4988-a291-6072e60211f3\" (UID: \"2a52bd28-14b8-4988-a291-6072e60211f3\") " Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.150284 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f971e8a-d223-4b26-860a-b2ea8f3d545f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8f971e8a-d223-4b26-860a-b2ea8f3d545f" (UID: "8f971e8a-d223-4b26-860a-b2ea8f3d545f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.151121 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fed1a3ac-e920-47b6-b864-6f1ec34c0770-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fed1a3ac-e920-47b6-b864-6f1ec34c0770" (UID: "fed1a3ac-e920-47b6-b864-6f1ec34c0770"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.152487 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a52bd28-14b8-4988-a291-6072e60211f3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2a52bd28-14b8-4988-a291-6072e60211f3" (UID: "2a52bd28-14b8-4988-a291-6072e60211f3"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.165707 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f971e8a-d223-4b26-860a-b2ea8f3d545f-kube-api-access-j5w86" (OuterVolumeSpecName: "kube-api-access-j5w86") pod "8f971e8a-d223-4b26-860a-b2ea8f3d545f" (UID: "8f971e8a-d223-4b26-860a-b2ea8f3d545f"). InnerVolumeSpecName "kube-api-access-j5w86". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.165986 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a52bd28-14b8-4988-a291-6072e60211f3-kube-api-access-6dvwr" (OuterVolumeSpecName: "kube-api-access-6dvwr") pod "2a52bd28-14b8-4988-a291-6072e60211f3" (UID: "2a52bd28-14b8-4988-a291-6072e60211f3"). InnerVolumeSpecName "kube-api-access-6dvwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.166339 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fed1a3ac-e920-47b6-b864-6f1ec34c0770-kube-api-access-mq8b9" (OuterVolumeSpecName: "kube-api-access-mq8b9") pod "fed1a3ac-e920-47b6-b864-6f1ec34c0770" (UID: "fed1a3ac-e920-47b6-b864-6f1ec34c0770"). InnerVolumeSpecName "kube-api-access-mq8b9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.238729 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-j6zr8" event={"ID":"874bb9d0-cbc9-4158-928d-8d6267fa02ab","Type":"ContainerDied","Data":"54b0cd8fb3bd556016af0b55f39061f53aeb3a09791d27f955e23a314788ec5b"} Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.239247 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54b0cd8fb3bd556016af0b55f39061f53aeb3a09791d27f955e23a314788ec5b" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.239319 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-j6zr8" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.241973 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-29l4m" event={"ID":"c0e7f310-44e3-41ea-b143-cc1074c854a6","Type":"ContainerDied","Data":"ca6fa67812458359cc7c46a1e0a8f631e7f0c2a4fce9e694b0354848c9190f8a"} Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.242000 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca6fa67812458359cc7c46a1e0a8f631e7f0c2a4fce9e694b0354848c9190f8a" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.242050 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-29l4m" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.245584 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5822-account-create-7wb8z" event={"ID":"8f971e8a-d223-4b26-860a-b2ea8f3d545f","Type":"ContainerDied","Data":"42ea9a972b4ede7f3f3bdb1743b93ce3ea80e08c13c1e0fd16774eed52a8d310"} Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.245627 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42ea9a972b4ede7f3f3bdb1743b93ce3ea80e08c13c1e0fd16774eed52a8d310" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.245689 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-5822-account-create-7wb8z" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.249861 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0e7f310-44e3-41ea-b143-cc1074c854a6-operator-scripts\") pod \"c0e7f310-44e3-41ea-b143-cc1074c854a6\" (UID: \"c0e7f310-44e3-41ea-b143-cc1074c854a6\") " Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.250273 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65vn9\" (UniqueName: \"kubernetes.io/projected/596ae3ef-3b18-4646-8b3c-34b6db752b22-kube-api-access-65vn9\") pod \"596ae3ef-3b18-4646-8b3c-34b6db752b22\" (UID: \"596ae3ef-3b18-4646-8b3c-34b6db752b22\") " Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.250323 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m94d\" (UniqueName: \"kubernetes.io/projected/c0e7f310-44e3-41ea-b143-cc1074c854a6-kube-api-access-2m94d\") pod \"c0e7f310-44e3-41ea-b143-cc1074c854a6\" (UID: \"c0e7f310-44e3-41ea-b143-cc1074c854a6\") " Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.250421 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkvwp\" (UniqueName: \"kubernetes.io/projected/874bb9d0-cbc9-4158-928d-8d6267fa02ab-kube-api-access-jkvwp\") pod \"874bb9d0-cbc9-4158-928d-8d6267fa02ab\" (UID: \"874bb9d0-cbc9-4158-928d-8d6267fa02ab\") " Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.250513 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/596ae3ef-3b18-4646-8b3c-34b6db752b22-operator-scripts\") pod \"596ae3ef-3b18-4646-8b3c-34b6db752b22\" (UID: \"596ae3ef-3b18-4646-8b3c-34b6db752b22\") " Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.250552 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/874bb9d0-cbc9-4158-928d-8d6267fa02ab-operator-scripts\") pod \"874bb9d0-cbc9-4158-928d-8d6267fa02ab\" (UID: \"874bb9d0-cbc9-4158-928d-8d6267fa02ab\") " Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.251285 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dvwr\" (UniqueName: \"kubernetes.io/projected/2a52bd28-14b8-4988-a291-6072e60211f3-kube-api-access-6dvwr\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.251314 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq8b9\" (UniqueName: \"kubernetes.io/projected/fed1a3ac-e920-47b6-b864-6f1ec34c0770-kube-api-access-mq8b9\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.251328 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8f971e8a-d223-4b26-860a-b2ea8f3d545f-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.251341 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fed1a3ac-e920-47b6-b864-6f1ec34c0770-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.251357 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/2a52bd28-14b8-4988-a291-6072e60211f3-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.251370 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5w86\" (UniqueName: \"kubernetes.io/projected/8f971e8a-d223-4b26-860a-b2ea8f3d545f-kube-api-access-j5w86\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.251540 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-fb24-account-create-tlmv2" event={"ID":"fed1a3ac-e920-47b6-b864-6f1ec34c0770","Type":"ContainerDied","Data":"2f381845987d01f3ac56d9127b6a9cae6f2ba052f363bec4c8233579a9305e90"} Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.251584 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f381845987d01f3ac56d9127b6a9cae6f2ba052f363bec4c8233579a9305e90" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.251699 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-fb24-account-create-tlmv2" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.251870 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/874bb9d0-cbc9-4158-928d-8d6267fa02ab-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "874bb9d0-cbc9-4158-928d-8d6267fa02ab" (UID: "874bb9d0-cbc9-4158-928d-8d6267fa02ab"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.251960 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/596ae3ef-3b18-4646-8b3c-34b6db752b22-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "596ae3ef-3b18-4646-8b3c-34b6db752b22" (UID: "596ae3ef-3b18-4646-8b3c-34b6db752b22"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.252800 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0e7f310-44e3-41ea-b143-cc1074c854a6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c0e7f310-44e3-41ea-b143-cc1074c854a6" (UID: "c0e7f310-44e3-41ea-b143-cc1074c854a6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.255771 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0e7f310-44e3-41ea-b143-cc1074c854a6-kube-api-access-2m94d" (OuterVolumeSpecName: "kube-api-access-2m94d") pod "c0e7f310-44e3-41ea-b143-cc1074c854a6" (UID: "c0e7f310-44e3-41ea-b143-cc1074c854a6"). InnerVolumeSpecName "kube-api-access-2m94d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.256152 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/596ae3ef-3b18-4646-8b3c-34b6db752b22-kube-api-access-65vn9" (OuterVolumeSpecName: "kube-api-access-65vn9") pod "596ae3ef-3b18-4646-8b3c-34b6db752b22" (UID: "596ae3ef-3b18-4646-8b3c-34b6db752b22"). InnerVolumeSpecName "kube-api-access-65vn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.259749 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-6523-account-create-4fm5p" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.259841 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-6523-account-create-4fm5p" event={"ID":"596ae3ef-3b18-4646-8b3c-34b6db752b22","Type":"ContainerDied","Data":"b3458a27519208c3b2ec9be248e03c7c4e47963369b0f5ba4dce775aee34bbf1"} Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.260357 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3458a27519208c3b2ec9be248e03c7c4e47963369b0f5ba4dce775aee34bbf1" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.261683 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-mhzgm" event={"ID":"2a52bd28-14b8-4988-a291-6072e60211f3","Type":"ContainerDied","Data":"94f28c2ce432711409e43d605a54635eeb560bc702f4dfaba2b3dc901538989b"} Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.261711 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94f28c2ce432711409e43d605a54635eeb560bc702f4dfaba2b3dc901538989b" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.261742 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-mhzgm" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.267368 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/874bb9d0-cbc9-4158-928d-8d6267fa02ab-kube-api-access-jkvwp" (OuterVolumeSpecName: "kube-api-access-jkvwp") pod "874bb9d0-cbc9-4158-928d-8d6267fa02ab" (UID: "874bb9d0-cbc9-4158-928d-8d6267fa02ab"). InnerVolumeSpecName "kube-api-access-jkvwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.353010 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkvwp\" (UniqueName: \"kubernetes.io/projected/874bb9d0-cbc9-4158-928d-8d6267fa02ab-kube-api-access-jkvwp\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.353047 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/596ae3ef-3b18-4646-8b3c-34b6db752b22-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.353057 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/874bb9d0-cbc9-4158-928d-8d6267fa02ab-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.353069 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0e7f310-44e3-41ea-b143-cc1074c854a6-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.353078 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65vn9\" (UniqueName: \"kubernetes.io/projected/596ae3ef-3b18-4646-8b3c-34b6db752b22-kube-api-access-65vn9\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:01 crc kubenswrapper[4774]: I1121 14:24:01.353090 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m94d\" (UniqueName: \"kubernetes.io/projected/c0e7f310-44e3-41ea-b143-cc1074c854a6-kube-api-access-2m94d\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:02 crc kubenswrapper[4774]: I1121 14:24:02.292973 4774 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"51b9f59856cff7bf6c2e7c193206a014c3b1c1b6ff7e65f0ffe94ef9fbaf701e"} Nov 21 14:24:02 crc kubenswrapper[4774]: I1121 14:24:02.293418 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"b0ff7c749c18817ad064c15649c712f4e89466819f6dd77e940ca84ed95e90a8"} Nov 21 14:24:02 crc kubenswrapper[4774]: I1121 14:24:02.293437 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"b02da81747033bd11ee8ad86892e420553f3c4e14b394a17b83ad199bf283c8e"} Nov 21 14:24:02 crc kubenswrapper[4774]: I1121 14:24:02.298178 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mpj9v" event={"ID":"df167c70-6bf6-4221-ac4d-fe967e1abaac","Type":"ContainerStarted","Data":"df99df975d7724db717054000f116a20d79041d1f3c9746269b3fac4603daed4"} Nov 21 14:24:02 crc kubenswrapper[4774]: I1121 14:24:02.334673 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-mpj9v" podStartSLOduration=2.470423329 podStartE2EDuration="8.334637835s" podCreationTimestamp="2025-11-21 14:23:54 +0000 UTC" firstStartedPulling="2025-11-21 14:23:55.663542109 +0000 UTC m=+1226.315741368" lastFinishedPulling="2025-11-21 14:24:01.527756615 +0000 UTC m=+1232.179955874" observedRunningTime="2025-11-21 14:24:02.322663259 +0000 UTC m=+1232.974862518" watchObservedRunningTime="2025-11-21 14:24:02.334637835 +0000 UTC m=+1232.986837094" Nov 21 14:24:03 crc kubenswrapper[4774]: I1121 14:24:03.314524 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"53a824add7ac0cc57042d70c06b911c7e6a34e1c2010603ee4d6fbc3ed438924"} Nov 21 14:24:03 crc kubenswrapper[4774]: I1121 14:24:03.318365 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6wkpn" event={"ID":"e3386949-a3f5-453c-953e-8deedb418d28","Type":"ContainerStarted","Data":"bb5a89d0866ada50c918454a8f38ceeea054b4043bfb044f638cc16eb0514685"} Nov 21 14:24:03 crc kubenswrapper[4774]: I1121 14:24:03.353307 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-6wkpn" podStartSLOduration=3.718515613 podStartE2EDuration="31.353281008s" podCreationTimestamp="2025-11-21 14:23:32 +0000 UTC" firstStartedPulling="2025-11-21 14:23:34.080012384 +0000 UTC m=+1204.732211643" lastFinishedPulling="2025-11-21 14:24:01.714777779 +0000 UTC m=+1232.366977038" observedRunningTime="2025-11-21 14:24:03.3454061 +0000 UTC m=+1233.997605369" watchObservedRunningTime="2025-11-21 14:24:03.353281008 +0000 UTC m=+1234.005480267" Nov 21 14:24:04 crc kubenswrapper[4774]: I1121 14:24:04.332359 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"1c4505cc2138852bc1de85d4a3368df20df9e8fc72c0b4f0a772d89a565a9d5c"} Nov 21 14:24:04 crc kubenswrapper[4774]: I1121 14:24:04.332873 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"4a13a7da01eb78f1caaf1ffc112b4e611dc9d20280166283d224a8d79da6a2d5"} Nov 21 14:24:05 crc kubenswrapper[4774]: I1121 14:24:05.353038 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"89801dafc1d6b54a7d5db86bdd9ef9aa021a679876daee4fe43e50ca59175ae5"} Nov 21 14:24:05 crc kubenswrapper[4774]: I1121 14:24:05.353656 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"2dce44da6f6202c7964d5937a707b66b8c0555f55b1d955191986f35ef80726a"} Nov 21 14:24:05 crc kubenswrapper[4774]: I1121 14:24:05.353677 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"834a28d7bd2427951828771181afd8a938666b3888becfb912c78842574fb9ae"} Nov 21 14:24:05 crc kubenswrapper[4774]: I1121 14:24:05.353690 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"cd578a4be6466cb961f25126446b01ae08dfe77292401d7b6dc5269637ee2e33"} Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.364275 4774 generic.go:334] "Generic (PLEG): container finished" podID="df167c70-6bf6-4221-ac4d-fe967e1abaac" containerID="df99df975d7724db717054000f116a20d79041d1f3c9746269b3fac4603daed4" exitCode=0 Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.364392 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mpj9v" event={"ID":"df167c70-6bf6-4221-ac4d-fe967e1abaac","Type":"ContainerDied","Data":"df99df975d7724db717054000f116a20d79041d1f3c9746269b3fac4603daed4"} Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.371701 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerStarted","Data":"01b0a87790fffb8562d8320c5dbbbc5a07eb54a2e1277dfed78d3269edb2bee5"} Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.421265 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.971053808 podStartE2EDuration="47.421237651s" podCreationTimestamp="2025-11-21 14:23:19 +0000 UTC" firstStartedPulling="2025-11-21 14:23:53.455149271 +0000 UTC m=+1224.107348530" lastFinishedPulling="2025-11-21 14:24:03.905333114 +0000 UTC m=+1234.557532373" observedRunningTime="2025-11-21 14:24:06.419111599 +0000 UTC m=+1237.071310868" watchObservedRunningTime="2025-11-21 14:24:06.421237651 +0000 UTC m=+1237.073436910" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.703104 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7bd88855cf-mncl7"] Nov 21 14:24:06 crc kubenswrapper[4774]: E1121 14:24:06.703539 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f971e8a-d223-4b26-860a-b2ea8f3d545f" containerName="mariadb-account-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.703559 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f971e8a-d223-4b26-860a-b2ea8f3d545f" containerName="mariadb-account-create" Nov 21 14:24:06 crc kubenswrapper[4774]: E1121 14:24:06.703585 4774 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c0e7f310-44e3-41ea-b143-cc1074c854a6" containerName="mariadb-database-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.703592 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0e7f310-44e3-41ea-b143-cc1074c854a6" containerName="mariadb-database-create" Nov 21 14:24:06 crc kubenswrapper[4774]: E1121 14:24:06.703606 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596ae3ef-3b18-4646-8b3c-34b6db752b22" containerName="mariadb-account-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.703614 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="596ae3ef-3b18-4646-8b3c-34b6db752b22" containerName="mariadb-account-create" Nov 21 14:24:06 crc kubenswrapper[4774]: E1121 14:24:06.703635 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fed1a3ac-e920-47b6-b864-6f1ec34c0770" containerName="mariadb-account-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.703641 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fed1a3ac-e920-47b6-b864-6f1ec34c0770" containerName="mariadb-account-create" Nov 21 14:24:06 crc kubenswrapper[4774]: E1121 14:24:06.703652 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a52bd28-14b8-4988-a291-6072e60211f3" containerName="mariadb-database-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.703658 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a52bd28-14b8-4988-a291-6072e60211f3" containerName="mariadb-database-create" Nov 21 14:24:06 crc kubenswrapper[4774]: E1121 14:24:06.703671 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874bb9d0-cbc9-4158-928d-8d6267fa02ab" containerName="mariadb-database-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.703677 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="874bb9d0-cbc9-4158-928d-8d6267fa02ab" containerName="mariadb-database-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.703929 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="874bb9d0-cbc9-4158-928d-8d6267fa02ab" containerName="mariadb-database-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.703948 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="fed1a3ac-e920-47b6-b864-6f1ec34c0770" containerName="mariadb-account-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.703959 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a52bd28-14b8-4988-a291-6072e60211f3" containerName="mariadb-database-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.703982 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f971e8a-d223-4b26-860a-b2ea8f3d545f" containerName="mariadb-account-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.703991 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="596ae3ef-3b18-4646-8b3c-34b6db752b22" containerName="mariadb-account-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.704003 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0e7f310-44e3-41ea-b143-cc1074c854a6" containerName="mariadb-database-create" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.705091 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.715166 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.725738 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bd88855cf-mncl7"] Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.773091 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-ovsdbserver-nb\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.773178 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm8q9\" (UniqueName: \"kubernetes.io/projected/bc13c923-40fe-4ddf-95af-f149f3d399ef-kube-api-access-fm8q9\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.773332 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-config\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.773403 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-dns-swift-storage-0\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.773439 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-dns-svc\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.773515 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-ovsdbserver-sb\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.875464 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-ovsdbserver-sb\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.876018 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-ovsdbserver-nb\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: 
\"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.876044 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm8q9\" (UniqueName: \"kubernetes.io/projected/bc13c923-40fe-4ddf-95af-f149f3d399ef-kube-api-access-fm8q9\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.876136 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-config\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.876384 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-dns-swift-storage-0\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.876413 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-dns-svc\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.877703 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-dns-svc\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.877736 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-dns-swift-storage-0\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.877745 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-config\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.877959 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-ovsdbserver-sb\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc kubenswrapper[4774]: I1121 14:24:06.878389 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-ovsdbserver-nb\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:06 crc 
kubenswrapper[4774]: I1121 14:24:06.901557 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm8q9\" (UniqueName: \"kubernetes.io/projected/bc13c923-40fe-4ddf-95af-f149f3d399ef-kube-api-access-fm8q9\") pod \"dnsmasq-dns-7bd88855cf-mncl7\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:07 crc kubenswrapper[4774]: I1121 14:24:07.024613 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:07 crc kubenswrapper[4774]: I1121 14:24:07.530685 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7bd88855cf-mncl7"] Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:07.677338 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:07.803141 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmv6p\" (UniqueName: \"kubernetes.io/projected/df167c70-6bf6-4221-ac4d-fe967e1abaac-kube-api-access-bmv6p\") pod \"df167c70-6bf6-4221-ac4d-fe967e1abaac\" (UID: \"df167c70-6bf6-4221-ac4d-fe967e1abaac\") " Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:07.803253 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df167c70-6bf6-4221-ac4d-fe967e1abaac-combined-ca-bundle\") pod \"df167c70-6bf6-4221-ac4d-fe967e1abaac\" (UID: \"df167c70-6bf6-4221-ac4d-fe967e1abaac\") " Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:07.804793 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df167c70-6bf6-4221-ac4d-fe967e1abaac-config-data\") pod \"df167c70-6bf6-4221-ac4d-fe967e1abaac\" (UID: \"df167c70-6bf6-4221-ac4d-fe967e1abaac\") " Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:07.812575 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df167c70-6bf6-4221-ac4d-fe967e1abaac-kube-api-access-bmv6p" (OuterVolumeSpecName: "kube-api-access-bmv6p") pod "df167c70-6bf6-4221-ac4d-fe967e1abaac" (UID: "df167c70-6bf6-4221-ac4d-fe967e1abaac"). InnerVolumeSpecName "kube-api-access-bmv6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:07.841109 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df167c70-6bf6-4221-ac4d-fe967e1abaac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df167c70-6bf6-4221-ac4d-fe967e1abaac" (UID: "df167c70-6bf6-4221-ac4d-fe967e1abaac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:07.865874 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df167c70-6bf6-4221-ac4d-fe967e1abaac-config-data" (OuterVolumeSpecName: "config-data") pod "df167c70-6bf6-4221-ac4d-fe967e1abaac" (UID: "df167c70-6bf6-4221-ac4d-fe967e1abaac"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:07.907234 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df167c70-6bf6-4221-ac4d-fe967e1abaac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:07.907280 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df167c70-6bf6-4221-ac4d-fe967e1abaac-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:07.907327 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmv6p\" (UniqueName: \"kubernetes.io/projected/df167c70-6bf6-4221-ac4d-fe967e1abaac-kube-api-access-bmv6p\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.397330 4774 generic.go:334] "Generic (PLEG): container finished" podID="bc13c923-40fe-4ddf-95af-f149f3d399ef" containerID="6ae465b363bd42aec2935d29aeacecb6a534229ef8b568762e1a15fbf2ab0ddc" exitCode=0 Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.397775 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" event={"ID":"bc13c923-40fe-4ddf-95af-f149f3d399ef","Type":"ContainerDied","Data":"6ae465b363bd42aec2935d29aeacecb6a534229ef8b568762e1a15fbf2ab0ddc"} Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.397927 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" event={"ID":"bc13c923-40fe-4ddf-95af-f149f3d399ef","Type":"ContainerStarted","Data":"1334cd70c179064dc0f74063a2c26a3cf4bc9eee7fc076803345e9f80cabb71e"} Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.407931 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-mpj9v" event={"ID":"df167c70-6bf6-4221-ac4d-fe967e1abaac","Type":"ContainerDied","Data":"6c76dc60cf6c0f1b18d4daee225df50d8afcecaf403cbb30946e78e783e75ec4"} Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.407998 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c76dc60cf6c0f1b18d4daee225df50d8afcecaf403cbb30946e78e783e75ec4" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.408056 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-mpj9v" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.689910 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bd88855cf-mncl7"] Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.723647 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-bgsf8"] Nov 21 14:24:08 crc kubenswrapper[4774]: E1121 14:24:08.724255 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df167c70-6bf6-4221-ac4d-fe967e1abaac" containerName="keystone-db-sync" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.724278 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="df167c70-6bf6-4221-ac4d-fe967e1abaac" containerName="keystone-db-sync" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.724504 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="df167c70-6bf6-4221-ac4d-fe967e1abaac" containerName="keystone-db-sync" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.725371 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.733749 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.734015 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.734166 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.734341 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-lxmk4" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.744943 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.748142 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-bgsf8"] Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.824600 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cccdc99dc-g2fvl"] Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.826610 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.845460 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cccdc99dc-g2fvl"] Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.852376 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-fernet-keys\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.852458 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-scripts\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.852503 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-combined-ca-bundle\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.852537 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wn4rg\" (UniqueName: \"kubernetes.io/projected/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-kube-api-access-wn4rg\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.852561 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-credential-keys\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 
crc kubenswrapper[4774]: I1121 14:24:08.852645 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-config-data\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.940872 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-h86r9"] Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.942414 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.946706 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-4zrw2" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.946707 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.947195 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.955948 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qc6ks\" (UniqueName: \"kubernetes.io/projected/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-kube-api-access-qc6ks\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.956026 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-config\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.956056 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-scripts\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.956100 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-combined-ca-bundle\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.956125 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wn4rg\" (UniqueName: \"kubernetes.io/projected/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-kube-api-access-wn4rg\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.956142 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-credential-keys\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc 
kubenswrapper[4774]: I1121 14:24:08.956181 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-dns-swift-storage-0\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.956231 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-config-data\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.956276 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-dns-svc\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.956305 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-ovsdbserver-sb\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.956339 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-fernet-keys\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.956359 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-ovsdbserver-nb\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.963311 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-rtvlq"] Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.964669 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.987917 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-h86r9"] Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.995422 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.995530 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-scripts\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.995663 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-credential-keys\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.996313 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 21 14:24:08 crc kubenswrapper[4774]: I1121 14:24:08.996534 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-rx2nk" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.009706 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-config-data\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.013697 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-combined-ca-bundle\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.017507 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-rtvlq"] Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.019588 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-fernet-keys\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.019677 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wn4rg\" (UniqueName: \"kubernetes.io/projected/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-kube-api-access-wn4rg\") pod \"keystone-bootstrap-bgsf8\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.055899 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-85xdf"] Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.057455 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.058577 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-db-sync-config-data\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.058641 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-combined-ca-bundle\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.058673 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-config-data\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.058705 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d14df5d7-bd15-499d-b228-e5b60f9f53fb-config\") pod \"neutron-db-sync-rtvlq\" (UID: \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\") " pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.058786 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-dns-svc\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.058810 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-ovsdbserver-sb\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.058858 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d14df5d7-bd15-499d-b228-e5b60f9f53fb-combined-ca-bundle\") pod \"neutron-db-sync-rtvlq\" (UID: \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\") " pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.058886 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-ovsdbserver-nb\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.058915 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qc6ks\" (UniqueName: \"kubernetes.io/projected/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-kube-api-access-qc6ks\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " 
pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.058935 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/92400548-ccdd-4e2a-9da5-3aeef0628e31-etc-machine-id\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.058968 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-config\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.059021 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8c5m\" (UniqueName: \"kubernetes.io/projected/d14df5d7-bd15-499d-b228-e5b60f9f53fb-kube-api-access-s8c5m\") pod \"neutron-db-sync-rtvlq\" (UID: \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\") " pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.059050 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cfd7\" (UniqueName: \"kubernetes.io/projected/92400548-ccdd-4e2a-9da5-3aeef0628e31-kube-api-access-6cfd7\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.059071 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-dns-swift-storage-0\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.059104 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-scripts\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.060157 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-dns-svc\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.060719 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-ovsdbserver-sb\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.061316 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.062090 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-ovsdbserver-nb\") 
pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.062363 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-config\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.062707 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-dns-swift-storage-0\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.062966 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.070952 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-z2zgb" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.071208 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.094111 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-85xdf"] Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.099713 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qc6ks\" (UniqueName: \"kubernetes.io/projected/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-kube-api-access-qc6ks\") pod \"dnsmasq-dns-cccdc99dc-g2fvl\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.111944 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cccdc99dc-g2fvl"] Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.113007 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.160755 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d14df5d7-bd15-499d-b228-e5b60f9f53fb-combined-ca-bundle\") pod \"neutron-db-sync-rtvlq\" (UID: \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\") " pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.162153 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/92400548-ccdd-4e2a-9da5-3aeef0628e31-etc-machine-id\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.162285 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-combined-ca-bundle\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.162525 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-config-data\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.162625 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8c5m\" (UniqueName: \"kubernetes.io/projected/d14df5d7-bd15-499d-b228-e5b60f9f53fb-kube-api-access-s8c5m\") pod \"neutron-db-sync-rtvlq\" (UID: \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\") " pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.162727 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cfd7\" (UniqueName: \"kubernetes.io/projected/92400548-ccdd-4e2a-9da5-3aeef0628e31-kube-api-access-6cfd7\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.162800 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt57n\" (UniqueName: \"kubernetes.io/projected/dc5f9b69-3714-4aee-8d39-1618184dbb91-kube-api-access-mt57n\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.162949 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-scripts\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.163055 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-db-sync-config-data\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 
crc kubenswrapper[4774]: I1121 14:24:09.163153 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-combined-ca-bundle\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.163599 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5f9b69-3714-4aee-8d39-1618184dbb91-logs\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.163727 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-config-data\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.163844 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d14df5d7-bd15-499d-b228-e5b60f9f53fb-config\") pod \"neutron-db-sync-rtvlq\" (UID: \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\") " pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.167441 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-scripts\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.164183 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/92400548-ccdd-4e2a-9da5-3aeef0628e31-etc-machine-id\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.184943 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d14df5d7-bd15-499d-b228-e5b60f9f53fb-combined-ca-bundle\") pod \"neutron-db-sync-rtvlq\" (UID: \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\") " pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.190545 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.192923 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-db-sync-config-data\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.196921 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.198094 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-config-data\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.213140 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8c5m\" (UniqueName: \"kubernetes.io/projected/d14df5d7-bd15-499d-b228-e5b60f9f53fb-kube-api-access-s8c5m\") pod \"neutron-db-sync-rtvlq\" (UID: \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\") " pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.213755 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.217608 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.225350 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-scripts\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.225476 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d14df5d7-bd15-499d-b228-e5b60f9f53fb-config\") pod \"neutron-db-sync-rtvlq\" (UID: \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\") " pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.226086 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-combined-ca-bundle\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.232943 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.233612 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cfd7\" (UniqueName: \"kubernetes.io/projected/92400548-ccdd-4e2a-9da5-3aeef0628e31-kube-api-access-6cfd7\") pod \"cinder-db-sync-h86r9\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.239497 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.258943 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.269846 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw5xq\" (UniqueName: \"kubernetes.io/projected/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-kube-api-access-cw5xq\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.269937 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-combined-ca-bundle\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.269977 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.270212 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-config-data\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.271189 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt57n\" (UniqueName: \"kubernetes.io/projected/dc5f9b69-3714-4aee-8d39-1618184dbb91-kube-api-access-mt57n\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.271247 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-log-httpd\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.271379 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-run-httpd\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.271444 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5f9b69-3714-4aee-8d39-1618184dbb91-logs\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.271632 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.271700 4774 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-scripts\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.271744 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-scripts\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.271806 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-config-data\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.273148 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5f9b69-3714-4aee-8d39-1618184dbb91-logs\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.277105 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-scripts\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.277647 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-668d98c7-wsjdv"] Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.281054 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.283413 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-combined-ca-bundle\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.302591 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-config-data\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.305557 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt57n\" (UniqueName: \"kubernetes.io/projected/dc5f9b69-3714-4aee-8d39-1618184dbb91-kube-api-access-mt57n\") pod \"placement-db-sync-85xdf\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.310281 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-668d98c7-wsjdv"] Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.338189 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-97g4j"] Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.339616 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.343710 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.345799 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-4rpzt" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.362430 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-97g4j"] Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.373883 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-config-data\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.373943 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-ovsdbserver-sb\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.373972 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw5xq\" (UniqueName: \"kubernetes.io/projected/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-kube-api-access-cw5xq\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.374028 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8bxf\" (UniqueName: 
\"kubernetes.io/projected/8636adb4-79dc-4a9c-a152-1782cec88e5a-kube-api-access-n8bxf\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.374557 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.376010 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-ovsdbserver-nb\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.376085 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-dns-svc\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.376163 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-log-httpd\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.376219 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-run-httpd\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.376246 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-dns-swift-storage-0\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.376322 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-config\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.376373 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.376460 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-scripts\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " 
pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.376596 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-log-httpd\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.377230 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-run-httpd\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.379071 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-config-data\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.393450 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.393629 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-scripts\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.395888 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.399707 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw5xq\" (UniqueName: \"kubernetes.io/projected/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-kube-api-access-cw5xq\") pod \"ceilometer-0\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.448115 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" event={"ID":"bc13c923-40fe-4ddf-95af-f149f3d399ef","Type":"ContainerStarted","Data":"9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb"} Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.448377 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" podUID="bc13c923-40fe-4ddf-95af-f149f3d399ef" containerName="dnsmasq-dns" containerID="cri-o://9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb" gracePeriod=10 Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.451693 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.478246 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-ovsdbserver-nb\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.478318 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-dns-svc\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.478354 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-dns-swift-storage-0\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.478381 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-config\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.478419 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vnxx\" (UniqueName: \"kubernetes.io/projected/a6996afa-3f45-411b-ac41-acf012c9c45e-kube-api-access-5vnxx\") pod \"barbican-db-sync-97g4j\" (UID: \"a6996afa-3f45-411b-ac41-acf012c9c45e\") " pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.478448 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a6996afa-3f45-411b-ac41-acf012c9c45e-db-sync-config-data\") pod \"barbican-db-sync-97g4j\" (UID: \"a6996afa-3f45-411b-ac41-acf012c9c45e\") " pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.478499 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-ovsdbserver-sb\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.478528 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8bxf\" (UniqueName: \"kubernetes.io/projected/8636adb4-79dc-4a9c-a152-1782cec88e5a-kube-api-access-n8bxf\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.478609 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6996afa-3f45-411b-ac41-acf012c9c45e-combined-ca-bundle\") pod \"barbican-db-sync-97g4j\" (UID: \"a6996afa-3f45-411b-ac41-acf012c9c45e\") " pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.479805 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-ovsdbserver-nb\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.480387 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-dns-svc\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.483480 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-config\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.485869 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-dns-swift-storage-0\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.490280 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-ovsdbserver-sb\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.520042 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8bxf\" (UniqueName: \"kubernetes.io/projected/8636adb4-79dc-4a9c-a152-1782cec88e5a-kube-api-access-n8bxf\") pod \"dnsmasq-dns-668d98c7-wsjdv\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.571622 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.580329 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6996afa-3f45-411b-ac41-acf012c9c45e-combined-ca-bundle\") pod \"barbican-db-sync-97g4j\" (UID: \"a6996afa-3f45-411b-ac41-acf012c9c45e\") " pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.580417 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vnxx\" (UniqueName: \"kubernetes.io/projected/a6996afa-3f45-411b-ac41-acf012c9c45e-kube-api-access-5vnxx\") pod \"barbican-db-sync-97g4j\" (UID: \"a6996afa-3f45-411b-ac41-acf012c9c45e\") " pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.580438 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a6996afa-3f45-411b-ac41-acf012c9c45e-db-sync-config-data\") pod \"barbican-db-sync-97g4j\" (UID: \"a6996afa-3f45-411b-ac41-acf012c9c45e\") " pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.595413 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.597453 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6996afa-3f45-411b-ac41-acf012c9c45e-combined-ca-bundle\") pod \"barbican-db-sync-97g4j\" (UID: \"a6996afa-3f45-411b-ac41-acf012c9c45e\") " pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.602935 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a6996afa-3f45-411b-ac41-acf012c9c45e-db-sync-config-data\") pod \"barbican-db-sync-97g4j\" (UID: \"a6996afa-3f45-411b-ac41-acf012c9c45e\") " pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.606387 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vnxx\" (UniqueName: \"kubernetes.io/projected/a6996afa-3f45-411b-ac41-acf012c9c45e-kube-api-access-5vnxx\") pod \"barbican-db-sync-97g4j\" (UID: \"a6996afa-3f45-411b-ac41-acf012c9c45e\") " pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.611342 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.670069 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.784945 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" podStartSLOduration=3.784926411 podStartE2EDuration="3.784926411s" podCreationTimestamp="2025-11-21 14:24:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:09.475530829 +0000 UTC m=+1240.127730088" watchObservedRunningTime="2025-11-21 14:24:09.784926411 +0000 UTC m=+1240.437125670" Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.815882 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cccdc99dc-g2fvl"] Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.839946 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-bgsf8"] Nov 21 14:24:09 crc kubenswrapper[4774]: I1121 14:24:09.946676 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-h86r9"] Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.144391 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-rtvlq"] Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.257183 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-85xdf"] Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.351210 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.435150 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-dns-swift-storage-0\") pod \"bc13c923-40fe-4ddf-95af-f149f3d399ef\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.435581 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-config\") pod \"bc13c923-40fe-4ddf-95af-f149f3d399ef\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.435615 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-dns-svc\") pod \"bc13c923-40fe-4ddf-95af-f149f3d399ef\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.435681 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fm8q9\" (UniqueName: \"kubernetes.io/projected/bc13c923-40fe-4ddf-95af-f149f3d399ef-kube-api-access-fm8q9\") pod \"bc13c923-40fe-4ddf-95af-f149f3d399ef\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.435758 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-ovsdbserver-nb\") pod \"bc13c923-40fe-4ddf-95af-f149f3d399ef\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.435871 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-ovsdbserver-sb\") pod \"bc13c923-40fe-4ddf-95af-f149f3d399ef\" (UID: \"bc13c923-40fe-4ddf-95af-f149f3d399ef\") " Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.484976 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc13c923-40fe-4ddf-95af-f149f3d399ef-kube-api-access-fm8q9" (OuterVolumeSpecName: "kube-api-access-fm8q9") pod "bc13c923-40fe-4ddf-95af-f149f3d399ef" (UID: "bc13c923-40fe-4ddf-95af-f149f3d399ef"). InnerVolumeSpecName "kube-api-access-fm8q9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.494187 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-h86r9" event={"ID":"92400548-ccdd-4e2a-9da5-3aeef0628e31","Type":"ContainerStarted","Data":"5ce74033b17bea79b5d3b887f6a431a04c7af47ae0e2df022c210e9fc116c729"} Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.496082 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bgsf8" event={"ID":"3e6f62a8-a0c2-48e3-9a68-b3f20708d934","Type":"ContainerStarted","Data":"14fab7235defa82ecd90e890170fc0a5e111ffc9d7771cc63213ec0934fab19e"} Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.497201 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rtvlq" event={"ID":"d14df5d7-bd15-499d-b228-e5b60f9f53fb","Type":"ContainerStarted","Data":"b67aaf2cd1ee8ecc491edbc152086ecc3893fba73184e65a25e080199fbc83d6"} Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.497846 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.500397 4774 generic.go:334] "Generic (PLEG): container finished" podID="bc13c923-40fe-4ddf-95af-f149f3d399ef" containerID="9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb" exitCode=0 Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.500586 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" event={"ID":"bc13c923-40fe-4ddf-95af-f149f3d399ef","Type":"ContainerDied","Data":"9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb"} Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.500677 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" event={"ID":"bc13c923-40fe-4ddf-95af-f149f3d399ef","Type":"ContainerDied","Data":"1334cd70c179064dc0f74063a2c26a3cf4bc9eee7fc076803345e9f80cabb71e"} Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.500702 4774 scope.go:117] "RemoveContainer" containerID="9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.500590 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bd88855cf-mncl7" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.504528 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" event={"ID":"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6","Type":"ContainerStarted","Data":"492b97f33ba5fabfeffd91ccb0d4ea9fe8bd5c5b19cd848377e7cf30d93e45fd"} Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.506286 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-85xdf" event={"ID":"dc5f9b69-3714-4aee-8d39-1618184dbb91","Type":"ContainerStarted","Data":"ff272066e9cbe6c14d12d935a63ff1c315cbd400bb5cc20fb5c92aa809189ecd"} Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.537772 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fm8q9\" (UniqueName: \"kubernetes.io/projected/bc13c923-40fe-4ddf-95af-f149f3d399ef-kube-api-access-fm8q9\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.589105 4774 scope.go:117] "RemoveContainer" containerID="6ae465b363bd42aec2935d29aeacecb6a534229ef8b568762e1a15fbf2ab0ddc" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.611401 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-668d98c7-wsjdv"] Nov 21 14:24:10 crc kubenswrapper[4774]: W1121 14:24:10.616429 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8636adb4_79dc_4a9c_a152_1782cec88e5a.slice/crio-56d62b55c2d01e9e24d8d2ab4b20f789d959bb158241e60d5e0b773f2bc9396c WatchSource:0}: Error finding container 56d62b55c2d01e9e24d8d2ab4b20f789d959bb158241e60d5e0b773f2bc9396c: Status 404 returned error can't find the container with id 56d62b55c2d01e9e24d8d2ab4b20f789d959bb158241e60d5e0b773f2bc9396c Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.626057 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-97g4j"] Nov 21 14:24:10 crc kubenswrapper[4774]: W1121 14:24:10.628471 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6996afa_3f45_411b_ac41_acf012c9c45e.slice/crio-6aba1d7c91bb27a267eedd066b951aaa84a590ed2f9a0f7eb3371e9429a1ad01 WatchSource:0}: Error finding container 6aba1d7c91bb27a267eedd066b951aaa84a590ed2f9a0f7eb3371e9429a1ad01: Status 404 returned error can't find the container with id 6aba1d7c91bb27a267eedd066b951aaa84a590ed2f9a0f7eb3371e9429a1ad01 Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.640744 4774 scope.go:117] "RemoveContainer" containerID="9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb" Nov 21 14:24:10 crc kubenswrapper[4774]: E1121 14:24:10.644344 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb\": container with ID starting with 9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb not found: ID does not exist" containerID="9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.644388 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb"} err="failed to get container status \"9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb\": rpc error: code 
= NotFound desc = could not find container \"9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb\": container with ID starting with 9678b2cb452fadcb179147b50a36b426bc295c9c13ec7017eadad5ec00a3bddb not found: ID does not exist" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.644417 4774 scope.go:117] "RemoveContainer" containerID="6ae465b363bd42aec2935d29aeacecb6a534229ef8b568762e1a15fbf2ab0ddc" Nov 21 14:24:10 crc kubenswrapper[4774]: E1121 14:24:10.645678 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ae465b363bd42aec2935d29aeacecb6a534229ef8b568762e1a15fbf2ab0ddc\": container with ID starting with 6ae465b363bd42aec2935d29aeacecb6a534229ef8b568762e1a15fbf2ab0ddc not found: ID does not exist" containerID="6ae465b363bd42aec2935d29aeacecb6a534229ef8b568762e1a15fbf2ab0ddc" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.645782 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ae465b363bd42aec2935d29aeacecb6a534229ef8b568762e1a15fbf2ab0ddc"} err="failed to get container status \"6ae465b363bd42aec2935d29aeacecb6a534229ef8b568762e1a15fbf2ab0ddc\": rpc error: code = NotFound desc = could not find container \"6ae465b363bd42aec2935d29aeacecb6a534229ef8b568762e1a15fbf2ab0ddc\": container with ID starting with 6ae465b363bd42aec2935d29aeacecb6a534229ef8b568762e1a15fbf2ab0ddc not found: ID does not exist" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.688135 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bc13c923-40fe-4ddf-95af-f149f3d399ef" (UID: "bc13c923-40fe-4ddf-95af-f149f3d399ef"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.708798 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bc13c923-40fe-4ddf-95af-f149f3d399ef" (UID: "bc13c923-40fe-4ddf-95af-f149f3d399ef"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.713836 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-config" (OuterVolumeSpecName: "config") pod "bc13c923-40fe-4ddf-95af-f149f3d399ef" (UID: "bc13c923-40fe-4ddf-95af-f149f3d399ef"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.718681 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bc13c923-40fe-4ddf-95af-f149f3d399ef" (UID: "bc13c923-40fe-4ddf-95af-f149f3d399ef"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.730755 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bc13c923-40fe-4ddf-95af-f149f3d399ef" (UID: "bc13c923-40fe-4ddf-95af-f149f3d399ef"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.742624 4774 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.742674 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.742685 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.742694 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.742706 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc13c923-40fe-4ddf-95af-f149f3d399ef-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.883405 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bd88855cf-mncl7"] Nov 21 14:24:10 crc kubenswrapper[4774]: I1121 14:24:10.892077 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bd88855cf-mncl7"] Nov 21 14:24:11 crc kubenswrapper[4774]: I1121 14:24:11.569908 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b84a14b-18fb-4c32-9fe8-81822e98ab6d","Type":"ContainerStarted","Data":"393aec3156581563b37a77fe03b1c7e16daf1c74ca9252dfd144189fa6a586c4"} Nov 21 14:24:11 crc kubenswrapper[4774]: I1121 14:24:11.596424 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bgsf8" event={"ID":"3e6f62a8-a0c2-48e3-9a68-b3f20708d934","Type":"ContainerStarted","Data":"21b7aeda05fe14e7e9c045015b25dd7fb41f27342b2b92959141887020d99f8f"} Nov 21 14:24:11 crc kubenswrapper[4774]: I1121 14:24:11.619675 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rtvlq" event={"ID":"d14df5d7-bd15-499d-b228-e5b60f9f53fb","Type":"ContainerStarted","Data":"e39f09e4dfe399ff6aa725be530f4a13f8b5581beb78b8270bd3111542e5c9f3"} Nov 21 14:24:11 crc kubenswrapper[4774]: I1121 14:24:11.632263 4774 generic.go:334] "Generic (PLEG): container finished" podID="d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6" containerID="83d10a34b2a8054e43c36f5d7ff86c18c18a20aac79326dba896056573b489fb" exitCode=0 Nov 21 14:24:11 crc kubenswrapper[4774]: I1121 14:24:11.632335 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" 
event={"ID":"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6","Type":"ContainerDied","Data":"83d10a34b2a8054e43c36f5d7ff86c18c18a20aac79326dba896056573b489fb"} Nov 21 14:24:11 crc kubenswrapper[4774]: I1121 14:24:11.643535 4774 generic.go:334] "Generic (PLEG): container finished" podID="8636adb4-79dc-4a9c-a152-1782cec88e5a" containerID="2820a7163660ca7cde6894253d1fcc9f45862ba7db8d6c80957dd1761931f15d" exitCode=0 Nov 21 14:24:11 crc kubenswrapper[4774]: I1121 14:24:11.643731 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" event={"ID":"8636adb4-79dc-4a9c-a152-1782cec88e5a","Type":"ContainerDied","Data":"2820a7163660ca7cde6894253d1fcc9f45862ba7db8d6c80957dd1761931f15d"} Nov 21 14:24:11 crc kubenswrapper[4774]: I1121 14:24:11.643805 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" event={"ID":"8636adb4-79dc-4a9c-a152-1782cec88e5a","Type":"ContainerStarted","Data":"56d62b55c2d01e9e24d8d2ab4b20f789d959bb158241e60d5e0b773f2bc9396c"} Nov 21 14:24:11 crc kubenswrapper[4774]: I1121 14:24:11.676343 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-97g4j" event={"ID":"a6996afa-3f45-411b-ac41-acf012c9c45e","Type":"ContainerStarted","Data":"6aba1d7c91bb27a267eedd066b951aaa84a590ed2f9a0f7eb3371e9429a1ad01"} Nov 21 14:24:11 crc kubenswrapper[4774]: I1121 14:24:11.716894 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-bgsf8" podStartSLOduration=3.71685526 podStartE2EDuration="3.71685526s" podCreationTimestamp="2025-11-21 14:24:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:11.649350449 +0000 UTC m=+1242.301549698" watchObservedRunningTime="2025-11-21 14:24:11.71685526 +0000 UTC m=+1242.369054529" Nov 21 14:24:11 crc kubenswrapper[4774]: I1121 14:24:11.882049 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-rtvlq" podStartSLOduration=3.882020515 podStartE2EDuration="3.882020515s" podCreationTimestamp="2025-11-21 14:24:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:11.851161823 +0000 UTC m=+1242.503361082" watchObservedRunningTime="2025-11-21 14:24:11.882020515 +0000 UTC m=+1242.534219764" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.129712 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc13c923-40fe-4ddf-95af-f149f3d399ef" path="/var/lib/kubelet/pods/bc13c923-40fe-4ddf-95af-f149f3d399ef/volumes" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.197180 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.305325 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.432890 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qc6ks\" (UniqueName: \"kubernetes.io/projected/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-kube-api-access-qc6ks\") pod \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.433043 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-ovsdbserver-sb\") pod \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.433247 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-dns-svc\") pod \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.433297 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-dns-swift-storage-0\") pod \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.433328 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-ovsdbserver-nb\") pod \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.433350 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-config\") pod \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\" (UID: \"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6\") " Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.444140 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-kube-api-access-qc6ks" (OuterVolumeSpecName: "kube-api-access-qc6ks") pod "d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6" (UID: "d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6"). InnerVolumeSpecName "kube-api-access-qc6ks". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.469088 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-config" (OuterVolumeSpecName: "config") pod "d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6" (UID: "d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.475142 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6" (UID: "d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.479269 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6" (UID: "d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.486110 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6" (UID: "d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.490721 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6" (UID: "d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.537427 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qc6ks\" (UniqueName: \"kubernetes.io/projected/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-kube-api-access-qc6ks\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.537476 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.537489 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.537501 4774 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.537513 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.537532 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.714385 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" event={"ID":"d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6","Type":"ContainerDied","Data":"492b97f33ba5fabfeffd91ccb0d4ea9fe8bd5c5b19cd848377e7cf30d93e45fd"} Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.714490 4774 scope.go:117] "RemoveContainer" containerID="83d10a34b2a8054e43c36f5d7ff86c18c18a20aac79326dba896056573b489fb" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.714731 
4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cccdc99dc-g2fvl" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.729979 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" event={"ID":"8636adb4-79dc-4a9c-a152-1782cec88e5a","Type":"ContainerStarted","Data":"c0c28c64d2df8c6456c0de25ebb577ad6365ac50839760f91d0117d546830c6b"} Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.731641 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.765428 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" podStartSLOduration=3.765402057 podStartE2EDuration="3.765402057s" podCreationTimestamp="2025-11-21 14:24:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:12.765254433 +0000 UTC m=+1243.417453692" watchObservedRunningTime="2025-11-21 14:24:12.765402057 +0000 UTC m=+1243.417601316" Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.834914 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cccdc99dc-g2fvl"] Nov 21 14:24:12 crc kubenswrapper[4774]: I1121 14:24:12.848902 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cccdc99dc-g2fvl"] Nov 21 14:24:14 crc kubenswrapper[4774]: I1121 14:24:14.105969 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6" path="/var/lib/kubelet/pods/d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6/volumes" Nov 21 14:24:14 crc kubenswrapper[4774]: I1121 14:24:14.765756 4774 generic.go:334] "Generic (PLEG): container finished" podID="e3386949-a3f5-453c-953e-8deedb418d28" containerID="bb5a89d0866ada50c918454a8f38ceeea054b4043bfb044f638cc16eb0514685" exitCode=0 Nov 21 14:24:14 crc kubenswrapper[4774]: I1121 14:24:14.765872 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6wkpn" event={"ID":"e3386949-a3f5-453c-953e-8deedb418d28","Type":"ContainerDied","Data":"bb5a89d0866ada50c918454a8f38ceeea054b4043bfb044f638cc16eb0514685"} Nov 21 14:24:14 crc kubenswrapper[4774]: I1121 14:24:14.768828 4774 generic.go:334] "Generic (PLEG): container finished" podID="3e6f62a8-a0c2-48e3-9a68-b3f20708d934" containerID="21b7aeda05fe14e7e9c045015b25dd7fb41f27342b2b92959141887020d99f8f" exitCode=0 Nov 21 14:24:14 crc kubenswrapper[4774]: I1121 14:24:14.768875 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bgsf8" event={"ID":"3e6f62a8-a0c2-48e3-9a68-b3f20708d934","Type":"ContainerDied","Data":"21b7aeda05fe14e7e9c045015b25dd7fb41f27342b2b92959141887020d99f8f"} Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.127623 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-6wkpn" Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.192281 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-config-data\") pod \"e3386949-a3f5-453c-953e-8deedb418d28\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.192426 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-combined-ca-bundle\") pod \"e3386949-a3f5-453c-953e-8deedb418d28\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.192502 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-db-sync-config-data\") pod \"e3386949-a3f5-453c-953e-8deedb418d28\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.192660 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8jbv\" (UniqueName: \"kubernetes.io/projected/e3386949-a3f5-453c-953e-8deedb418d28-kube-api-access-r8jbv\") pod \"e3386949-a3f5-453c-953e-8deedb418d28\" (UID: \"e3386949-a3f5-453c-953e-8deedb418d28\") " Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.200648 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3386949-a3f5-453c-953e-8deedb418d28-kube-api-access-r8jbv" (OuterVolumeSpecName: "kube-api-access-r8jbv") pod "e3386949-a3f5-453c-953e-8deedb418d28" (UID: "e3386949-a3f5-453c-953e-8deedb418d28"). InnerVolumeSpecName "kube-api-access-r8jbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.209542 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e3386949-a3f5-453c-953e-8deedb418d28" (UID: "e3386949-a3f5-453c-953e-8deedb418d28"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.239208 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3386949-a3f5-453c-953e-8deedb418d28" (UID: "e3386949-a3f5-453c-953e-8deedb418d28"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.260292 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-config-data" (OuterVolumeSpecName: "config-data") pod "e3386949-a3f5-453c-953e-8deedb418d28" (UID: "e3386949-a3f5-453c-953e-8deedb418d28"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.294882 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8jbv\" (UniqueName: \"kubernetes.io/projected/e3386949-a3f5-453c-953e-8deedb418d28-kube-api-access-r8jbv\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.294927 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.294937 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.294947 4774 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e3386949-a3f5-453c-953e-8deedb418d28-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.820450 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-6wkpn" event={"ID":"e3386949-a3f5-453c-953e-8deedb418d28","Type":"ContainerDied","Data":"bffec3a69e8046d4c85de9307c27120b5fe8685f4e62837ccc08b099358372d2"} Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.820796 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bffec3a69e8046d4c85de9307c27120b5fe8685f4e62837ccc08b099358372d2" Nov 21 14:24:18 crc kubenswrapper[4774]: I1121 14:24:18.820891 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-6wkpn" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.614169 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.637658 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-668d98c7-wsjdv"] Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.682220 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-77f579c57c-6xmzp"] Nov 21 14:24:19 crc kubenswrapper[4774]: E1121 14:24:19.682685 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6" containerName="init" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.682703 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6" containerName="init" Nov 21 14:24:19 crc kubenswrapper[4774]: E1121 14:24:19.682723 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3386949-a3f5-453c-953e-8deedb418d28" containerName="glance-db-sync" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.682735 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3386949-a3f5-453c-953e-8deedb418d28" containerName="glance-db-sync" Nov 21 14:24:19 crc kubenswrapper[4774]: E1121 14:24:19.682757 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc13c923-40fe-4ddf-95af-f149f3d399ef" containerName="init" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.682765 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc13c923-40fe-4ddf-95af-f149f3d399ef" containerName="init" Nov 21 14:24:19 crc kubenswrapper[4774]: E1121 14:24:19.682780 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc13c923-40fe-4ddf-95af-f149f3d399ef" containerName="dnsmasq-dns" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.682786 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc13c923-40fe-4ddf-95af-f149f3d399ef" containerName="dnsmasq-dns" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.682964 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="d41d4241-cd0c-4e3f-ae09-f6fbbb84eaf6" containerName="init" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.682990 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3386949-a3f5-453c-953e-8deedb418d28" containerName="glance-db-sync" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.683007 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc13c923-40fe-4ddf-95af-f149f3d399ef" containerName="dnsmasq-dns" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.691095 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.780951 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77f579c57c-6xmzp"] Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.837114 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-ovsdbserver-sb\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.837193 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-dns-svc\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.837279 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-config\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.838690 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcmgw\" (UniqueName: \"kubernetes.io/projected/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-kube-api-access-hcmgw\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.838753 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-dns-swift-storage-0\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.838800 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-ovsdbserver-nb\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.879236 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" podUID="8636adb4-79dc-4a9c-a152-1782cec88e5a" containerName="dnsmasq-dns" containerID="cri-o://c0c28c64d2df8c6456c0de25ebb577ad6365ac50839760f91d0117d546830c6b" gracePeriod=10 Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.940269 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-ovsdbserver-sb\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.940751 4774 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-dns-svc\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.940820 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-config\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.940903 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcmgw\" (UniqueName: \"kubernetes.io/projected/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-kube-api-access-hcmgw\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.940926 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-dns-swift-storage-0\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.940947 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-ovsdbserver-nb\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.941879 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-ovsdbserver-nb\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.942441 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-config\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.943082 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-ovsdbserver-sb\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.943742 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-dns-svc\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.961116 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-dns-swift-storage-0\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:19 crc kubenswrapper[4774]: I1121 14:24:19.976313 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcmgw\" (UniqueName: \"kubernetes.io/projected/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-kube-api-access-hcmgw\") pod \"dnsmasq-dns-77f579c57c-6xmzp\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.033121 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.516663 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.518766 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.522253 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.524103 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-487jw" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.530576 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.532931 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.653414 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-config-data\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.653491 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.653525 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acc36aae-c9d5-40f4-b040-abbe85571c23-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.653596 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acc36aae-c9d5-40f4-b040-abbe85571c23-logs\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.653623 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-tjq9t\" (UniqueName: \"kubernetes.io/projected/acc36aae-c9d5-40f4-b040-abbe85571c23-kube-api-access-tjq9t\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.653710 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-scripts\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.653737 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.756729 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-scripts\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.756790 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.756856 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-config-data\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.756896 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.756917 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acc36aae-c9d5-40f4-b040-abbe85571c23-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.756966 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acc36aae-c9d5-40f4-b040-abbe85571c23-logs\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.756988 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjq9t\" (UniqueName: 
\"kubernetes.io/projected/acc36aae-c9d5-40f4-b040-abbe85571c23-kube-api-access-tjq9t\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.757684 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.757962 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acc36aae-c9d5-40f4-b040-abbe85571c23-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.757994 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acc36aae-c9d5-40f4-b040-abbe85571c23-logs\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.763302 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-scripts\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.763416 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.774790 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjq9t\" (UniqueName: \"kubernetes.io/projected/acc36aae-c9d5-40f4-b040-abbe85571c23-kube-api-access-tjq9t\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.782175 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-config-data\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.793172 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.889553 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.897455 4774 generic.go:334] "Generic (PLEG): container finished" podID="8636adb4-79dc-4a9c-a152-1782cec88e5a" containerID="c0c28c64d2df8c6456c0de25ebb577ad6365ac50839760f91d0117d546830c6b" exitCode=0 Nov 21 14:24:20 crc kubenswrapper[4774]: I1121 14:24:20.897528 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" event={"ID":"8636adb4-79dc-4a9c-a152-1782cec88e5a","Type":"ContainerDied","Data":"c0c28c64d2df8c6456c0de25ebb577ad6365ac50839760f91d0117d546830c6b"} Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.134633 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.137377 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.143333 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.155164 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.285797 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/153403d8-28c2-468b-9583-e66b4701ab5e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.285896 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.285971 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5zvw\" (UniqueName: \"kubernetes.io/projected/153403d8-28c2-468b-9583-e66b4701ab5e-kube-api-access-h5zvw\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.286000 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.286075 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.286103 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/153403d8-28c2-468b-9583-e66b4701ab5e-logs\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.286130 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.388401 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5zvw\" (UniqueName: \"kubernetes.io/projected/153403d8-28c2-468b-9583-e66b4701ab5e-kube-api-access-h5zvw\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.388478 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.388581 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.388625 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.388653 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/153403d8-28c2-468b-9583-e66b4701ab5e-logs\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.388753 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/153403d8-28c2-468b-9583-e66b4701ab5e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.388788 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.389190 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: 
\"153403d8-28c2-468b-9583-e66b4701ab5e\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.389643 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/153403d8-28c2-468b-9583-e66b4701ab5e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.389799 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/153403d8-28c2-468b-9583-e66b4701ab5e-logs\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.402789 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.404540 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.407689 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5zvw\" (UniqueName: \"kubernetes.io/projected/153403d8-28c2-468b-9583-e66b4701ab5e-kube-api-access-h5zvw\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.415322 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.436542 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:21 crc kubenswrapper[4774]: I1121 14:24:21.474485 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.597585 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.725789 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-credential-keys\") pod \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.725917 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-scripts\") pod \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.725987 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-config-data\") pod \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.726236 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wn4rg\" (UniqueName: \"kubernetes.io/projected/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-kube-api-access-wn4rg\") pod \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.726268 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-fernet-keys\") pod \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.726441 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-combined-ca-bundle\") pod \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\" (UID: \"3e6f62a8-a0c2-48e3-9a68-b3f20708d934\") " Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.731299 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-kube-api-access-wn4rg" (OuterVolumeSpecName: "kube-api-access-wn4rg") pod "3e6f62a8-a0c2-48e3-9a68-b3f20708d934" (UID: "3e6f62a8-a0c2-48e3-9a68-b3f20708d934"). InnerVolumeSpecName "kube-api-access-wn4rg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.731661 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3e6f62a8-a0c2-48e3-9a68-b3f20708d934" (UID: "3e6f62a8-a0c2-48e3-9a68-b3f20708d934"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.735523 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-scripts" (OuterVolumeSpecName: "scripts") pod "3e6f62a8-a0c2-48e3-9a68-b3f20708d934" (UID: "3e6f62a8-a0c2-48e3-9a68-b3f20708d934"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.735743 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3e6f62a8-a0c2-48e3-9a68-b3f20708d934" (UID: "3e6f62a8-a0c2-48e3-9a68-b3f20708d934"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.761889 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e6f62a8-a0c2-48e3-9a68-b3f20708d934" (UID: "3e6f62a8-a0c2-48e3-9a68-b3f20708d934"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.763476 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-config-data" (OuterVolumeSpecName: "config-data") pod "3e6f62a8-a0c2-48e3-9a68-b3f20708d934" (UID: "3e6f62a8-a0c2-48e3-9a68-b3f20708d934"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.829282 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wn4rg\" (UniqueName: \"kubernetes.io/projected/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-kube-api-access-wn4rg\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.829322 4774 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.829334 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.829343 4774 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.829352 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.829361 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e6f62a8-a0c2-48e3-9a68-b3f20708d934-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.854979 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.927917 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-bgsf8" event={"ID":"3e6f62a8-a0c2-48e3-9a68-b3f20708d934","Type":"ContainerDied","Data":"14fab7235defa82ecd90e890170fc0a5e111ffc9d7771cc63213ec0934fab19e"} Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.927978 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-bgsf8" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.927985 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14fab7235defa82ecd90e890170fc0a5e111ffc9d7771cc63213ec0934fab19e" Nov 21 14:24:22 crc kubenswrapper[4774]: I1121 14:24:22.967192 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.728702 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-bgsf8"] Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.739898 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-bgsf8"] Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.830404 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-tjdsz"] Nov 21 14:24:23 crc kubenswrapper[4774]: E1121 14:24:23.831006 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e6f62a8-a0c2-48e3-9a68-b3f20708d934" containerName="keystone-bootstrap" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.831034 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e6f62a8-a0c2-48e3-9a68-b3f20708d934" containerName="keystone-bootstrap" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.831273 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e6f62a8-a0c2-48e3-9a68-b3f20708d934" containerName="keystone-bootstrap" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.832227 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.834134 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.834396 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-lxmk4" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.834475 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.834475 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.835933 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.844868 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tjdsz"] Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.975704 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-combined-ca-bundle\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.975778 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-scripts\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.975964 4774 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdm6g\" (UniqueName: \"kubernetes.io/projected/cf55e528-0d45-4c04-8f50-674d2b40625c-kube-api-access-sdm6g\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.976017 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-fernet-keys\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.976033 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-credential-keys\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:23 crc kubenswrapper[4774]: I1121 14:24:23.976128 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-config-data\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.078055 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdm6g\" (UniqueName: \"kubernetes.io/projected/cf55e528-0d45-4c04-8f50-674d2b40625c-kube-api-access-sdm6g\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.078183 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-credential-keys\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.079300 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-fernet-keys\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.079448 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-config-data\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.079550 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-combined-ca-bundle\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.079592 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-scripts\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.090580 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-scripts\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.090762 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-combined-ca-bundle\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.090765 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-credential-keys\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.091012 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-config-data\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.092646 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-fernet-keys\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.098211 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdm6g\" (UniqueName: \"kubernetes.io/projected/cf55e528-0d45-4c04-8f50-674d2b40625c-kube-api-access-sdm6g\") pod \"keystone-bootstrap-tjdsz\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.112497 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e6f62a8-a0c2-48e3-9a68-b3f20708d934" path="/var/lib/kubelet/pods/3e6f62a8-a0c2-48e3-9a68-b3f20708d934/volumes" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.161052 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:24 crc kubenswrapper[4774]: I1121 14:24:24.614338 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" podUID="8636adb4-79dc-4a9c-a152-1782cec88e5a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.140:5353: connect: connection refused" Nov 21 14:24:29 crc kubenswrapper[4774]: I1121 14:24:29.612584 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" podUID="8636adb4-79dc-4a9c-a152-1782cec88e5a" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.140:5353: connect: connection refused" Nov 21 14:24:31 crc kubenswrapper[4774]: E1121 14:24:31.945744 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:25d0e71e0464df9502ad8bd3af0f73caeaca1bae11d89b4b5992b4fe712eda3a" Nov 21 14:24:31 crc kubenswrapper[4774]: E1121 14:24:31.946357 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:25d0e71e0464df9502ad8bd3af0f73caeaca1bae11d89b4b5992b4fe712eda3a,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6cfd7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]Conta
inerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-h86r9_openstack(92400548-ccdd-4e2a-9da5-3aeef0628e31): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 14:24:31 crc kubenswrapper[4774]: E1121 14:24:31.947577 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-h86r9" podUID="92400548-ccdd-4e2a-9da5-3aeef0628e31" Nov 21 14:24:32 crc kubenswrapper[4774]: E1121 14:24:32.040923 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api@sha256:25d0e71e0464df9502ad8bd3af0f73caeaca1bae11d89b4b5992b4fe712eda3a\\\"\"" pod="openstack/cinder-db-sync-h86r9" podUID="92400548-ccdd-4e2a-9da5-3aeef0628e31" Nov 21 14:24:32 crc kubenswrapper[4774]: E1121 14:24:32.427927 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:09513b0e7384092548dd654fa2356d64e243315cf59fa8857bd6c4a3ae4037c4" Nov 21 14:24:32 crc kubenswrapper[4774]: E1121 14:24:32.428483 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:09513b0e7384092548dd654fa2356d64e243315cf59fa8857bd6c4a3ae4037c4,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5vnxx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-97g4j_openstack(a6996afa-3f45-411b-ac41-acf012c9c45e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Nov 21 14:24:32 crc kubenswrapper[4774]: E1121 14:24:32.429695 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with 
ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-97g4j" podUID="a6996afa-3f45-411b-ac41-acf012c9c45e" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.623314 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.791314 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-dns-svc\") pod \"8636adb4-79dc-4a9c-a152-1782cec88e5a\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.792052 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-config\") pod \"8636adb4-79dc-4a9c-a152-1782cec88e5a\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.792165 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8bxf\" (UniqueName: \"kubernetes.io/projected/8636adb4-79dc-4a9c-a152-1782cec88e5a-kube-api-access-n8bxf\") pod \"8636adb4-79dc-4a9c-a152-1782cec88e5a\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.792216 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-ovsdbserver-sb\") pod \"8636adb4-79dc-4a9c-a152-1782cec88e5a\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.792266 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-dns-swift-storage-0\") pod \"8636adb4-79dc-4a9c-a152-1782cec88e5a\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.793015 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-ovsdbserver-nb\") pod \"8636adb4-79dc-4a9c-a152-1782cec88e5a\" (UID: \"8636adb4-79dc-4a9c-a152-1782cec88e5a\") " Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.799141 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8636adb4-79dc-4a9c-a152-1782cec88e5a-kube-api-access-n8bxf" (OuterVolumeSpecName: "kube-api-access-n8bxf") pod "8636adb4-79dc-4a9c-a152-1782cec88e5a" (UID: "8636adb4-79dc-4a9c-a152-1782cec88e5a"). InnerVolumeSpecName "kube-api-access-n8bxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.854588 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8636adb4-79dc-4a9c-a152-1782cec88e5a" (UID: "8636adb4-79dc-4a9c-a152-1782cec88e5a"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.855365 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8636adb4-79dc-4a9c-a152-1782cec88e5a" (UID: "8636adb4-79dc-4a9c-a152-1782cec88e5a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.862114 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8636adb4-79dc-4a9c-a152-1782cec88e5a" (UID: "8636adb4-79dc-4a9c-a152-1782cec88e5a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.867124 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8636adb4-79dc-4a9c-a152-1782cec88e5a" (UID: "8636adb4-79dc-4a9c-a152-1782cec88e5a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.876289 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-config" (OuterVolumeSpecName: "config") pod "8636adb4-79dc-4a9c-a152-1782cec88e5a" (UID: "8636adb4-79dc-4a9c-a152-1782cec88e5a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.895693 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.895752 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.895769 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8bxf\" (UniqueName: \"kubernetes.io/projected/8636adb4-79dc-4a9c-a152-1782cec88e5a-kube-api-access-n8bxf\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.895786 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.895803 4774 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:32 crc kubenswrapper[4774]: I1121 14:24:32.895815 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8636adb4-79dc-4a9c-a152-1782cec88e5a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.021271 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-77f579c57c-6xmzp"] Nov 21 
14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.047261 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" event={"ID":"8636adb4-79dc-4a9c-a152-1782cec88e5a","Type":"ContainerDied","Data":"56d62b55c2d01e9e24d8d2ab4b20f789d959bb158241e60d5e0b773f2bc9396c"} Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.047301 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-668d98c7-wsjdv" Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.047334 4774 scope.go:117] "RemoveContainer" containerID="c0c28c64d2df8c6456c0de25ebb577ad6365ac50839760f91d0117d546830c6b" Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.049788 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-85xdf" event={"ID":"dc5f9b69-3714-4aee-8d39-1618184dbb91","Type":"ContainerStarted","Data":"86fa94a80df28b54b4d5368a5a30fc434548f1c7d9fff7e5b31574d1e6c77717"} Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.067508 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" event={"ID":"2b08f504-86a8-4ba3-bea6-bb23f66be0c6","Type":"ContainerStarted","Data":"27475e0572274fac8ac1cdfc43553115c7f58f38a19304f603b638b1741b9704"} Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.076099 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b84a14b-18fb-4c32-9fe8-81822e98ab6d","Type":"ContainerStarted","Data":"65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47"} Nov 21 14:24:33 crc kubenswrapper[4774]: E1121 14:24:33.084381 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api@sha256:09513b0e7384092548dd654fa2356d64e243315cf59fa8857bd6c4a3ae4037c4\\\"\"" pod="openstack/barbican-db-sync-97g4j" podUID="a6996afa-3f45-411b-ac41-acf012c9c45e" Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.111528 4774 scope.go:117] "RemoveContainer" containerID="2820a7163660ca7cde6894253d1fcc9f45862ba7db8d6c80957dd1761931f15d" Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.116126 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-85xdf" podStartSLOduration=1.950667755 podStartE2EDuration="24.116098894s" podCreationTimestamp="2025-11-21 14:24:09 +0000 UTC" firstStartedPulling="2025-11-21 14:24:10.271779533 +0000 UTC m=+1240.923978802" lastFinishedPulling="2025-11-21 14:24:32.437210682 +0000 UTC m=+1263.089409941" observedRunningTime="2025-11-21 14:24:33.107630339 +0000 UTC m=+1263.759829598" watchObservedRunningTime="2025-11-21 14:24:33.116098894 +0000 UTC m=+1263.768298153" Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.191879 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-668d98c7-wsjdv"] Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.196730 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-668d98c7-wsjdv"] Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.230650 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tjdsz"] Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.246305 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 21 14:24:33 crc kubenswrapper[4774]: I1121 14:24:33.339846 4774 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:24:33 crc kubenswrapper[4774]: W1121 14:24:33.361151 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacc36aae_c9d5_40f4_b040_abbe85571c23.slice/crio-8c6c49e521ad68ebebd7705d32eef61885e499fa886752ec35835e9243551702 WatchSource:0}: Error finding container 8c6c49e521ad68ebebd7705d32eef61885e499fa886752ec35835e9243551702: Status 404 returned error can't find the container with id 8c6c49e521ad68ebebd7705d32eef61885e499fa886752ec35835e9243551702 Nov 21 14:24:34 crc kubenswrapper[4774]: I1121 14:24:34.098765 4774 generic.go:334] "Generic (PLEG): container finished" podID="d14df5d7-bd15-499d-b228-e5b60f9f53fb" containerID="e39f09e4dfe399ff6aa725be530f4a13f8b5581beb78b8270bd3111542e5c9f3" exitCode=0 Nov 21 14:24:34 crc kubenswrapper[4774]: I1121 14:24:34.115084 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8636adb4-79dc-4a9c-a152-1782cec88e5a" path="/var/lib/kubelet/pods/8636adb4-79dc-4a9c-a152-1782cec88e5a/volumes" Nov 21 14:24:34 crc kubenswrapper[4774]: I1121 14:24:34.121261 4774 generic.go:334] "Generic (PLEG): container finished" podID="2b08f504-86a8-4ba3-bea6-bb23f66be0c6" containerID="182685a75126ca5175831953404aa3287add2e6eb1afdce015cdc1fab6d2c408" exitCode=0 Nov 21 14:24:34 crc kubenswrapper[4774]: I1121 14:24:34.127156 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tjdsz" event={"ID":"cf55e528-0d45-4c04-8f50-674d2b40625c","Type":"ContainerStarted","Data":"1271e61aa11802d7fb4ab4117cfddc4172f791cb66fb11ceca51b9d2cdc0afe1"} Nov 21 14:24:34 crc kubenswrapper[4774]: I1121 14:24:34.127230 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tjdsz" event={"ID":"cf55e528-0d45-4c04-8f50-674d2b40625c","Type":"ContainerStarted","Data":"62a569e4cf2d79c374847f235bc36cb8c6bdfa18affd15f67311499eb255a5dd"} Nov 21 14:24:34 crc kubenswrapper[4774]: I1121 14:24:34.127252 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rtvlq" event={"ID":"d14df5d7-bd15-499d-b228-e5b60f9f53fb","Type":"ContainerDied","Data":"e39f09e4dfe399ff6aa725be530f4a13f8b5581beb78b8270bd3111542e5c9f3"} Nov 21 14:24:34 crc kubenswrapper[4774]: I1121 14:24:34.127272 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"acc36aae-c9d5-40f4-b040-abbe85571c23","Type":"ContainerStarted","Data":"fc37a6d21047c4b2fb64d546cc2aae11f6809afd683fd6e3545c3adba4a2924b"} Nov 21 14:24:34 crc kubenswrapper[4774]: I1121 14:24:34.127288 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"acc36aae-c9d5-40f4-b040-abbe85571c23","Type":"ContainerStarted","Data":"8c6c49e521ad68ebebd7705d32eef61885e499fa886752ec35835e9243551702"} Nov 21 14:24:34 crc kubenswrapper[4774]: I1121 14:24:34.127306 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" event={"ID":"2b08f504-86a8-4ba3-bea6-bb23f66be0c6","Type":"ContainerDied","Data":"182685a75126ca5175831953404aa3287add2e6eb1afdce015cdc1fab6d2c408"} Nov 21 14:24:34 crc kubenswrapper[4774]: I1121 14:24:34.134291 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-tjdsz" podStartSLOduration=11.134264121 podStartE2EDuration="11.134264121s" podCreationTimestamp="2025-11-21 
14:24:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:34.116366354 +0000 UTC m=+1264.768565613" watchObservedRunningTime="2025-11-21 14:24:34.134264121 +0000 UTC m=+1264.786463380" Nov 21 14:24:34 crc kubenswrapper[4774]: I1121 14:24:34.179969 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.135004 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"acc36aae-c9d5-40f4-b040-abbe85571c23","Type":"ContainerStarted","Data":"0ce0017fb9832863a5f233f1e7afe55c177ca4eaf20225d3c15e7aceb322efb7"} Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.135189 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="acc36aae-c9d5-40f4-b040-abbe85571c23" containerName="glance-log" containerID="cri-o://fc37a6d21047c4b2fb64d546cc2aae11f6809afd683fd6e3545c3adba4a2924b" gracePeriod=30 Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.135647 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="acc36aae-c9d5-40f4-b040-abbe85571c23" containerName="glance-httpd" containerID="cri-o://0ce0017fb9832863a5f233f1e7afe55c177ca4eaf20225d3c15e7aceb322efb7" gracePeriod=30 Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.141798 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" event={"ID":"2b08f504-86a8-4ba3-bea6-bb23f66be0c6","Type":"ContainerStarted","Data":"7f2235622587f3b15c99c819d06a54a06743a3c45e1c6a23a3878354dbf4b122"} Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.142020 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.146409 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b84a14b-18fb-4c32-9fe8-81822e98ab6d","Type":"ContainerStarted","Data":"efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a"} Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.148581 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"153403d8-28c2-468b-9583-e66b4701ab5e","Type":"ContainerStarted","Data":"7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca"} Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.148714 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"153403d8-28c2-468b-9583-e66b4701ab5e","Type":"ContainerStarted","Data":"c0f75f551a15b70735f4e71c1137102fed8d5bcce9bfb22a3132f3fdd31a05b6"} Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.177624 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=16.177605287 podStartE2EDuration="16.177605287s" podCreationTimestamp="2025-11-21 14:24:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:35.167552057 +0000 UTC m=+1265.819751316" watchObservedRunningTime="2025-11-21 14:24:35.177605287 +0000 UTC m=+1265.829804536" Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.216579 4774 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" podStartSLOduration=16.216551073 podStartE2EDuration="16.216551073s" podCreationTimestamp="2025-11-21 14:24:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:35.203515666 +0000 UTC m=+1265.855714945" watchObservedRunningTime="2025-11-21 14:24:35.216551073 +0000 UTC m=+1265.868750332" Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.485388 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.573738 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d14df5d7-bd15-499d-b228-e5b60f9f53fb-combined-ca-bundle\") pod \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\" (UID: \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\") " Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.573876 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8c5m\" (UniqueName: \"kubernetes.io/projected/d14df5d7-bd15-499d-b228-e5b60f9f53fb-kube-api-access-s8c5m\") pod \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\" (UID: \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\") " Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.574006 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d14df5d7-bd15-499d-b228-e5b60f9f53fb-config\") pod \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\" (UID: \"d14df5d7-bd15-499d-b228-e5b60f9f53fb\") " Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.579385 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d14df5d7-bd15-499d-b228-e5b60f9f53fb-kube-api-access-s8c5m" (OuterVolumeSpecName: "kube-api-access-s8c5m") pod "d14df5d7-bd15-499d-b228-e5b60f9f53fb" (UID: "d14df5d7-bd15-499d-b228-e5b60f9f53fb"). InnerVolumeSpecName "kube-api-access-s8c5m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.601993 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d14df5d7-bd15-499d-b228-e5b60f9f53fb-config" (OuterVolumeSpecName: "config") pod "d14df5d7-bd15-499d-b228-e5b60f9f53fb" (UID: "d14df5d7-bd15-499d-b228-e5b60f9f53fb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.602123 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d14df5d7-bd15-499d-b228-e5b60f9f53fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d14df5d7-bd15-499d-b228-e5b60f9f53fb" (UID: "d14df5d7-bd15-499d-b228-e5b60f9f53fb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.677942 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d14df5d7-bd15-499d-b228-e5b60f9f53fb-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.678062 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d14df5d7-bd15-499d-b228-e5b60f9f53fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:35 crc kubenswrapper[4774]: I1121 14:24:35.678089 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8c5m\" (UniqueName: \"kubernetes.io/projected/d14df5d7-bd15-499d-b228-e5b60f9f53fb-kube-api-access-s8c5m\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.160635 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rtvlq" event={"ID":"d14df5d7-bd15-499d-b228-e5b60f9f53fb","Type":"ContainerDied","Data":"b67aaf2cd1ee8ecc491edbc152086ecc3893fba73184e65a25e080199fbc83d6"} Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.161048 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b67aaf2cd1ee8ecc491edbc152086ecc3893fba73184e65a25e080199fbc83d6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.160686 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-rtvlq" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.163955 4774 generic.go:334] "Generic (PLEG): container finished" podID="acc36aae-c9d5-40f4-b040-abbe85571c23" containerID="0ce0017fb9832863a5f233f1e7afe55c177ca4eaf20225d3c15e7aceb322efb7" exitCode=0 Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.163998 4774 generic.go:334] "Generic (PLEG): container finished" podID="acc36aae-c9d5-40f4-b040-abbe85571c23" containerID="fc37a6d21047c4b2fb64d546cc2aae11f6809afd683fd6e3545c3adba4a2924b" exitCode=143 Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.164038 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"acc36aae-c9d5-40f4-b040-abbe85571c23","Type":"ContainerDied","Data":"0ce0017fb9832863a5f233f1e7afe55c177ca4eaf20225d3c15e7aceb322efb7"} Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.164084 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"acc36aae-c9d5-40f4-b040-abbe85571c23","Type":"ContainerDied","Data":"fc37a6d21047c4b2fb64d546cc2aae11f6809afd683fd6e3545c3adba4a2924b"} Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.439799 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77f579c57c-6xmzp"] Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.495393 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-797dcf9445-xzkbz"] Nov 21 14:24:36 crc kubenswrapper[4774]: E1121 14:24:36.495849 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8636adb4-79dc-4a9c-a152-1782cec88e5a" containerName="dnsmasq-dns" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.495866 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8636adb4-79dc-4a9c-a152-1782cec88e5a" containerName="dnsmasq-dns" Nov 21 14:24:36 crc kubenswrapper[4774]: E1121 14:24:36.495877 4774 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d14df5d7-bd15-499d-b228-e5b60f9f53fb" containerName="neutron-db-sync" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.495882 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d14df5d7-bd15-499d-b228-e5b60f9f53fb" containerName="neutron-db-sync" Nov 21 14:24:36 crc kubenswrapper[4774]: E1121 14:24:36.495913 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8636adb4-79dc-4a9c-a152-1782cec88e5a" containerName="init" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.495920 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8636adb4-79dc-4a9c-a152-1782cec88e5a" containerName="init" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.496110 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8636adb4-79dc-4a9c-a152-1782cec88e5a" containerName="dnsmasq-dns" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.496132 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="d14df5d7-bd15-499d-b228-e5b60f9f53fb" containerName="neutron-db-sync" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.497199 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.532785 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5ffc8bc4bd-l7vz6"] Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.534523 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.540188 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.540427 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.540564 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.540694 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-rx2nk" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.547505 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-797dcf9445-xzkbz"] Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.576160 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5ffc8bc4bd-l7vz6"] Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.598127 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-dns-swift-storage-0\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.598180 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-ovsdbserver-nb\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.598232 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-ovsdbserver-sb\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.598256 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-combined-ca-bundle\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.598280 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-config\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.598333 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmzqr\" (UniqueName: \"kubernetes.io/projected/5c606266-0467-4aeb-85ae-10f0643e09d2-kube-api-access-nmzqr\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.598379 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-dns-svc\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.598409 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-ovndb-tls-certs\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.598428 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fqvr\" (UniqueName: \"kubernetes.io/projected/f5ea34fd-2d79-4102-bf44-99ba443fc794-kube-api-access-6fqvr\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.598460 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-httpd-config\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.598507 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-config\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.701035 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-nmzqr\" (UniqueName: \"kubernetes.io/projected/5c606266-0467-4aeb-85ae-10f0643e09d2-kube-api-access-nmzqr\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.701134 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-dns-svc\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.701180 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-ovndb-tls-certs\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.701211 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fqvr\" (UniqueName: \"kubernetes.io/projected/f5ea34fd-2d79-4102-bf44-99ba443fc794-kube-api-access-6fqvr\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.701262 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-httpd-config\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.701328 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-config\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.701393 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-dns-swift-storage-0\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.701425 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-ovsdbserver-nb\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.701481 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-ovsdbserver-sb\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.701519 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-combined-ca-bundle\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.701546 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-config\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.702653 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-ovsdbserver-nb\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.702813 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-dns-swift-storage-0\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.703251 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-ovsdbserver-sb\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.704675 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-config\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.705078 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-dns-svc\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.707512 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-config\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.710186 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-ovndb-tls-certs\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.713813 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-combined-ca-bundle\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " 
pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.713905 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-httpd-config\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.724933 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fqvr\" (UniqueName: \"kubernetes.io/projected/f5ea34fd-2d79-4102-bf44-99ba443fc794-kube-api-access-6fqvr\") pod \"neutron-5ffc8bc4bd-l7vz6\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.732661 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmzqr\" (UniqueName: \"kubernetes.io/projected/5c606266-0467-4aeb-85ae-10f0643e09d2-kube-api-access-nmzqr\") pod \"dnsmasq-dns-797dcf9445-xzkbz\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.816031 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:36 crc kubenswrapper[4774]: I1121 14:24:36.870497 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:37 crc kubenswrapper[4774]: I1121 14:24:37.175450 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" podUID="2b08f504-86a8-4ba3-bea6-bb23f66be0c6" containerName="dnsmasq-dns" containerID="cri-o://7f2235622587f3b15c99c819d06a54a06743a3c45e1c6a23a3878354dbf4b122" gracePeriod=10 Nov 21 14:24:37 crc kubenswrapper[4774]: I1121 14:24:37.369374 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-797dcf9445-xzkbz"] Nov 21 14:24:37 crc kubenswrapper[4774]: I1121 14:24:37.510687 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5ffc8bc4bd-l7vz6"] Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.237079 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ffc8bc4bd-l7vz6" event={"ID":"f5ea34fd-2d79-4102-bf44-99ba443fc794","Type":"ContainerStarted","Data":"29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb"} Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.237747 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ffc8bc4bd-l7vz6" event={"ID":"f5ea34fd-2d79-4102-bf44-99ba443fc794","Type":"ContainerStarted","Data":"b7188a38fa19d1ee33f224da408204f164feed1a1d39cf72e229d126d304c455"} Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.249193 4774 generic.go:334] "Generic (PLEG): container finished" podID="dc5f9b69-3714-4aee-8d39-1618184dbb91" containerID="86fa94a80df28b54b4d5368a5a30fc434548f1c7d9fff7e5b31574d1e6c77717" exitCode=0 Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.249744 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-85xdf" event={"ID":"dc5f9b69-3714-4aee-8d39-1618184dbb91","Type":"ContainerDied","Data":"86fa94a80df28b54b4d5368a5a30fc434548f1c7d9fff7e5b31574d1e6c77717"} Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.253962 4774 generic.go:334] "Generic (PLEG): container finished" 
podID="2b08f504-86a8-4ba3-bea6-bb23f66be0c6" containerID="7f2235622587f3b15c99c819d06a54a06743a3c45e1c6a23a3878354dbf4b122" exitCode=0 Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.254031 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" event={"ID":"2b08f504-86a8-4ba3-bea6-bb23f66be0c6","Type":"ContainerDied","Data":"7f2235622587f3b15c99c819d06a54a06743a3c45e1c6a23a3878354dbf4b122"} Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.274119 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"153403d8-28c2-468b-9583-e66b4701ab5e","Type":"ContainerStarted","Data":"63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1"} Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.274342 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="153403d8-28c2-468b-9583-e66b4701ab5e" containerName="glance-log" containerID="cri-o://7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca" gracePeriod=30 Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.274700 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="153403d8-28c2-468b-9583-e66b4701ab5e" containerName="glance-httpd" containerID="cri-o://63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1" gracePeriod=30 Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.291700 4774 generic.go:334] "Generic (PLEG): container finished" podID="5c606266-0467-4aeb-85ae-10f0643e09d2" containerID="1098b44e16a7dea1fd692b56bb336294c0c6ddbe1b030abb2d8ed9a0f0299526" exitCode=0 Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.291764 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" event={"ID":"5c606266-0467-4aeb-85ae-10f0643e09d2","Type":"ContainerDied","Data":"1098b44e16a7dea1fd692b56bb336294c0c6ddbe1b030abb2d8ed9a0f0299526"} Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.291798 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" event={"ID":"5c606266-0467-4aeb-85ae-10f0643e09d2","Type":"ContainerStarted","Data":"cbb305b74e1df5f109fc9aefef4c9f9af74f9c2b8ee629b775268ea171deec40"} Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.335196 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=18.3351612 podStartE2EDuration="18.3351612s" podCreationTimestamp="2025-11-21 14:24:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:38.31783928 +0000 UTC m=+1268.970038539" watchObservedRunningTime="2025-11-21 14:24:38.3351612 +0000 UTC m=+1268.987360459" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.380981 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.451667 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-dns-svc\") pod \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.451762 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcmgw\" (UniqueName: \"kubernetes.io/projected/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-kube-api-access-hcmgw\") pod \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.451837 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-ovsdbserver-nb\") pod \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.451911 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-config\") pod \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.452020 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-dns-swift-storage-0\") pod \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.452079 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-ovsdbserver-sb\") pod \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\" (UID: \"2b08f504-86a8-4ba3-bea6-bb23f66be0c6\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.487633 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-kube-api-access-hcmgw" (OuterVolumeSpecName: "kube-api-access-hcmgw") pod "2b08f504-86a8-4ba3-bea6-bb23f66be0c6" (UID: "2b08f504-86a8-4ba3-bea6-bb23f66be0c6"). InnerVolumeSpecName "kube-api-access-hcmgw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.559462 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcmgw\" (UniqueName: \"kubernetes.io/projected/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-kube-api-access-hcmgw\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.560049 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-config" (OuterVolumeSpecName: "config") pod "2b08f504-86a8-4ba3-bea6-bb23f66be0c6" (UID: "2b08f504-86a8-4ba3-bea6-bb23f66be0c6"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.560682 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2b08f504-86a8-4ba3-bea6-bb23f66be0c6" (UID: "2b08f504-86a8-4ba3-bea6-bb23f66be0c6"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.562541 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2b08f504-86a8-4ba3-bea6-bb23f66be0c6" (UID: "2b08f504-86a8-4ba3-bea6-bb23f66be0c6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.587098 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2b08f504-86a8-4ba3-bea6-bb23f66be0c6" (UID: "2b08f504-86a8-4ba3-bea6-bb23f66be0c6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.595798 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2b08f504-86a8-4ba3-bea6-bb23f66be0c6" (UID: "2b08f504-86a8-4ba3-bea6-bb23f66be0c6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.661536 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.661589 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.661603 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.661619 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.661629 4774 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2b08f504-86a8-4ba3-bea6-bb23f66be0c6-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.693991 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.763360 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"acc36aae-c9d5-40f4-b040-abbe85571c23\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.763451 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-config-data\") pod \"acc36aae-c9d5-40f4-b040-abbe85571c23\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.763509 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-combined-ca-bundle\") pod \"acc36aae-c9d5-40f4-b040-abbe85571c23\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.763549 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acc36aae-c9d5-40f4-b040-abbe85571c23-logs\") pod \"acc36aae-c9d5-40f4-b040-abbe85571c23\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.763583 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjq9t\" (UniqueName: \"kubernetes.io/projected/acc36aae-c9d5-40f4-b040-abbe85571c23-kube-api-access-tjq9t\") pod \"acc36aae-c9d5-40f4-b040-abbe85571c23\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.763643 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acc36aae-c9d5-40f4-b040-abbe85571c23-httpd-run\") pod \"acc36aae-c9d5-40f4-b040-abbe85571c23\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.763682 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-scripts\") pod \"acc36aae-c9d5-40f4-b040-abbe85571c23\" (UID: \"acc36aae-c9d5-40f4-b040-abbe85571c23\") " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.769651 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acc36aae-c9d5-40f4-b040-abbe85571c23-logs" (OuterVolumeSpecName: "logs") pod "acc36aae-c9d5-40f4-b040-abbe85571c23" (UID: "acc36aae-c9d5-40f4-b040-abbe85571c23"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.774489 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acc36aae-c9d5-40f4-b040-abbe85571c23-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "acc36aae-c9d5-40f4-b040-abbe85571c23" (UID: "acc36aae-c9d5-40f4-b040-abbe85571c23"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.812010 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "acc36aae-c9d5-40f4-b040-abbe85571c23" (UID: "acc36aae-c9d5-40f4-b040-abbe85571c23"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.820123 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-scripts" (OuterVolumeSpecName: "scripts") pod "acc36aae-c9d5-40f4-b040-abbe85571c23" (UID: "acc36aae-c9d5-40f4-b040-abbe85571c23"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.829346 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acc36aae-c9d5-40f4-b040-abbe85571c23-kube-api-access-tjq9t" (OuterVolumeSpecName: "kube-api-access-tjq9t") pod "acc36aae-c9d5-40f4-b040-abbe85571c23" (UID: "acc36aae-c9d5-40f4-b040-abbe85571c23"). InnerVolumeSpecName "kube-api-access-tjq9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.869325 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.869368 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.869389 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acc36aae-c9d5-40f4-b040-abbe85571c23-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.869398 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjq9t\" (UniqueName: \"kubernetes.io/projected/acc36aae-c9d5-40f4-b040-abbe85571c23-kube-api-access-tjq9t\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.869410 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acc36aae-c9d5-40f4-b040-abbe85571c23-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.914733 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "acc36aae-c9d5-40f4-b040-abbe85571c23" (UID: "acc36aae-c9d5-40f4-b040-abbe85571c23"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.921299 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.956798 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-config-data" (OuterVolumeSpecName: "config-data") pod "acc36aae-c9d5-40f4-b040-abbe85571c23" (UID: "acc36aae-c9d5-40f4-b040-abbe85571c23"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.973215 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.973267 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:38 crc kubenswrapper[4774]: I1121 14:24:38.973281 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acc36aae-c9d5-40f4-b040-abbe85571c23-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.152764 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5546774f69-cpnh7"] Nov 21 14:24:39 crc kubenswrapper[4774]: E1121 14:24:39.153315 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acc36aae-c9d5-40f4-b040-abbe85571c23" containerName="glance-httpd" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.153338 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="acc36aae-c9d5-40f4-b040-abbe85571c23" containerName="glance-httpd" Nov 21 14:24:39 crc kubenswrapper[4774]: E1121 14:24:39.153358 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b08f504-86a8-4ba3-bea6-bb23f66be0c6" containerName="init" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.153365 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b08f504-86a8-4ba3-bea6-bb23f66be0c6" containerName="init" Nov 21 14:24:39 crc kubenswrapper[4774]: E1121 14:24:39.153381 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acc36aae-c9d5-40f4-b040-abbe85571c23" containerName="glance-log" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.153387 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="acc36aae-c9d5-40f4-b040-abbe85571c23" containerName="glance-log" Nov 21 14:24:39 crc kubenswrapper[4774]: E1121 14:24:39.153401 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b08f504-86a8-4ba3-bea6-bb23f66be0c6" containerName="dnsmasq-dns" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.153407 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b08f504-86a8-4ba3-bea6-bb23f66be0c6" containerName="dnsmasq-dns" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.153592 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="acc36aae-c9d5-40f4-b040-abbe85571c23" containerName="glance-log" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.153624 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b08f504-86a8-4ba3-bea6-bb23f66be0c6" 
containerName="dnsmasq-dns" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.153636 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="acc36aae-c9d5-40f4-b040-abbe85571c23" containerName="glance-httpd" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.160772 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.163473 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.164182 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.178354 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-httpd-config\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.178418 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnnfn\" (UniqueName: \"kubernetes.io/projected/d7a5f9e1-9167-418e-8e1e-57e645d31785-kube-api-access-mnnfn\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.178464 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-public-tls-certs\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.178482 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-combined-ca-bundle\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.178511 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-ovndb-tls-certs\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.178578 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-internal-tls-certs\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.178601 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-config\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 
14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.194525 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.198524 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5546774f69-cpnh7"] Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.278984 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/153403d8-28c2-468b-9583-e66b4701ab5e-httpd-run\") pod \"153403d8-28c2-468b-9583-e66b4701ab5e\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279062 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-config-data\") pod \"153403d8-28c2-468b-9583-e66b4701ab5e\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279142 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"153403d8-28c2-468b-9583-e66b4701ab5e\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279174 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-scripts\") pod \"153403d8-28c2-468b-9583-e66b4701ab5e\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279220 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-combined-ca-bundle\") pod \"153403d8-28c2-468b-9583-e66b4701ab5e\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279246 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/153403d8-28c2-468b-9583-e66b4701ab5e-logs\") pod \"153403d8-28c2-468b-9583-e66b4701ab5e\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279266 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5zvw\" (UniqueName: \"kubernetes.io/projected/153403d8-28c2-468b-9583-e66b4701ab5e-kube-api-access-h5zvw\") pod \"153403d8-28c2-468b-9583-e66b4701ab5e\" (UID: \"153403d8-28c2-468b-9583-e66b4701ab5e\") " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279380 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-public-tls-certs\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279401 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-combined-ca-bundle\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc 
kubenswrapper[4774]: I1121 14:24:39.279425 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-ovndb-tls-certs\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279474 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-internal-tls-certs\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279494 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-config\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279528 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-httpd-config\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279566 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnnfn\" (UniqueName: \"kubernetes.io/projected/d7a5f9e1-9167-418e-8e1e-57e645d31785-kube-api-access-mnnfn\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.279658 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/153403d8-28c2-468b-9583-e66b4701ab5e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "153403d8-28c2-468b-9583-e66b4701ab5e" (UID: "153403d8-28c2-468b-9583-e66b4701ab5e"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.281359 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/153403d8-28c2-468b-9583-e66b4701ab5e-logs" (OuterVolumeSpecName: "logs") pod "153403d8-28c2-468b-9583-e66b4701ab5e" (UID: "153403d8-28c2-468b-9583-e66b4701ab5e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.290179 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-scripts" (OuterVolumeSpecName: "scripts") pod "153403d8-28c2-468b-9583-e66b4701ab5e" (UID: "153403d8-28c2-468b-9583-e66b4701ab5e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.290167 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "153403d8-28c2-468b-9583-e66b4701ab5e" (UID: "153403d8-28c2-468b-9583-e66b4701ab5e"). InnerVolumeSpecName "local-storage11-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.299258 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-ovndb-tls-certs\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.302480 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/153403d8-28c2-468b-9583-e66b4701ab5e-kube-api-access-h5zvw" (OuterVolumeSpecName: "kube-api-access-h5zvw") pod "153403d8-28c2-468b-9583-e66b4701ab5e" (UID: "153403d8-28c2-468b-9583-e66b4701ab5e"). InnerVolumeSpecName "kube-api-access-h5zvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.303995 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-httpd-config\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.304695 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-internal-tls-certs\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.304894 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-public-tls-certs\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.312559 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-combined-ca-bundle\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.313621 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"acc36aae-c9d5-40f4-b040-abbe85571c23","Type":"ContainerDied","Data":"8c6c49e521ad68ebebd7705d32eef61885e499fa886752ec35835e9243551702"} Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.313695 4774 scope.go:117] "RemoveContainer" containerID="0ce0017fb9832863a5f233f1e7afe55c177ca4eaf20225d3c15e7aceb322efb7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.313703 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.322922 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnnfn\" (UniqueName: \"kubernetes.io/projected/d7a5f9e1-9167-418e-8e1e-57e645d31785-kube-api-access-mnnfn\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.323793 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-config\") pod \"neutron-5546774f69-cpnh7\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.324596 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" event={"ID":"5c606266-0467-4aeb-85ae-10f0643e09d2","Type":"ContainerStarted","Data":"3cf3b06170c900cbdf012cea2f74d24d307cde15ee42b6765918d613b43ccfe7"} Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.324742 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.329768 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ffc8bc4bd-l7vz6" event={"ID":"f5ea34fd-2d79-4102-bf44-99ba443fc794","Type":"ContainerStarted","Data":"a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad"} Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.329971 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.334277 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.337047 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-77f579c57c-6xmzp" event={"ID":"2b08f504-86a8-4ba3-bea6-bb23f66be0c6","Type":"ContainerDied","Data":"27475e0572274fac8ac1cdfc43553115c7f58f38a19304f603b638b1741b9704"} Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.347434 4774 generic.go:334] "Generic (PLEG): container finished" podID="153403d8-28c2-468b-9583-e66b4701ab5e" containerID="63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1" exitCode=0 Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.347469 4774 generic.go:334] "Generic (PLEG): container finished" podID="153403d8-28c2-468b-9583-e66b4701ab5e" containerID="7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca" exitCode=143 Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.347518 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"153403d8-28c2-468b-9583-e66b4701ab5e","Type":"ContainerDied","Data":"63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1"} Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.347549 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"153403d8-28c2-468b-9583-e66b4701ab5e","Type":"ContainerDied","Data":"7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca"} Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.347562 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"153403d8-28c2-468b-9583-e66b4701ab5e","Type":"ContainerDied","Data":"c0f75f551a15b70735f4e71c1137102fed8d5bcce9bfb22a3132f3fdd31a05b6"} Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.347622 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.351013 4774 generic.go:334] "Generic (PLEG): container finished" podID="cf55e528-0d45-4c04-8f50-674d2b40625c" containerID="1271e61aa11802d7fb4ab4117cfddc4172f791cb66fb11ceca51b9d2cdc0afe1" exitCode=0 Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.351154 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tjdsz" event={"ID":"cf55e528-0d45-4c04-8f50-674d2b40625c","Type":"ContainerDied","Data":"1271e61aa11802d7fb4ab4117cfddc4172f791cb66fb11ceca51b9d2cdc0afe1"} Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.361705 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "153403d8-28c2-468b-9583-e66b4701ab5e" (UID: "153403d8-28c2-468b-9583-e66b4701ab5e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.363597 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" podStartSLOduration=3.363567785 podStartE2EDuration="3.363567785s" podCreationTimestamp="2025-11-21 14:24:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:39.346134641 +0000 UTC m=+1269.998333900" watchObservedRunningTime="2025-11-21 14:24:39.363567785 +0000 UTC m=+1270.015767044" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.385485 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/153403d8-28c2-468b-9583-e66b4701ab5e-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.385532 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.385544 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.385557 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.385573 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/153403d8-28c2-468b-9583-e66b4701ab5e-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.385584 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5zvw\" (UniqueName: \"kubernetes.io/projected/153403d8-28c2-468b-9583-e66b4701ab5e-kube-api-access-h5zvw\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.418099 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5ffc8bc4bd-l7vz6" podStartSLOduration=3.41807125 podStartE2EDuration="3.41807125s" podCreationTimestamp="2025-11-21 14:24:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:39.394406396 +0000 UTC m=+1270.046605655" watchObservedRunningTime="2025-11-21 14:24:39.41807125 +0000 UTC m=+1270.070270499" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.443529 4774 scope.go:117] "RemoveContainer" containerID="fc37a6d21047c4b2fb64d546cc2aae11f6809afd683fd6e3545c3adba4a2924b" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.448506 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-config-data" (OuterVolumeSpecName: "config-data") pod "153403d8-28c2-468b-9583-e66b4701ab5e" (UID: "153403d8-28c2-468b-9583-e66b4701ab5e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.455293 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.461738 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.489538 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/153403d8-28c2-468b-9583-e66b4701ab5e-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.489573 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.504581 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.509144 4774 scope.go:117] "RemoveContainer" containerID="7f2235622587f3b15c99c819d06a54a06743a3c45e1c6a23a3878354dbf4b122" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.524977 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.535401 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-77f579c57c-6xmzp"] Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.555422 4774 scope.go:117] "RemoveContainer" containerID="182685a75126ca5175831953404aa3287add2e6eb1afdce015cdc1fab6d2c408" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.576810 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-77f579c57c-6xmzp"] Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.590811 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:24:39 crc kubenswrapper[4774]: E1121 14:24:39.591410 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="153403d8-28c2-468b-9583-e66b4701ab5e" containerName="glance-log" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.591427 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="153403d8-28c2-468b-9583-e66b4701ab5e" containerName="glance-log" Nov 21 14:24:39 crc kubenswrapper[4774]: E1121 14:24:39.591453 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="153403d8-28c2-468b-9583-e66b4701ab5e" containerName="glance-httpd" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.591459 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="153403d8-28c2-468b-9583-e66b4701ab5e" containerName="glance-httpd" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.591659 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="153403d8-28c2-468b-9583-e66b4701ab5e" containerName="glance-httpd" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.591673 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="153403d8-28c2-468b-9583-e66b4701ab5e" containerName="glance-log" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.592795 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.597959 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.597967 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.600777 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.621803 4774 scope.go:117] "RemoveContainer" containerID="63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.649540 4774 scope.go:117] "RemoveContainer" containerID="7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.692527 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.695991 4774 scope.go:117] "RemoveContainer" containerID="63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1" Nov 21 14:24:39 crc kubenswrapper[4774]: E1121 14:24:39.699577 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1\": container with ID starting with 63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1 not found: ID does not exist" containerID="63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.701030 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.701064 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-config-data\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.700015 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1"} err="failed to get container status \"63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1\": rpc error: code = NotFound desc = could not find container \"63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1\": container with ID starting with 63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1 not found: ID does not exist" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.701133 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4f5n\" (UniqueName: \"kubernetes.io/projected/0171c979-def5-4c7e-8551-cd40d008e88e-kube-api-access-x4f5n\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " 
pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.701173 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0171c979-def5-4c7e-8551-cd40d008e88e-logs\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.701221 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.701275 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.701296 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0171c979-def5-4c7e-8551-cd40d008e88e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.701336 4774 scope.go:117] "RemoveContainer" containerID="7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.701434 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-scripts\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.709117 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:24:39 crc kubenswrapper[4774]: E1121 14:24:39.709984 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca\": container with ID starting with 7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca not found: ID does not exist" containerID="7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.710045 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca"} err="failed to get container status \"7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca\": rpc error: code = NotFound desc = could not find container \"7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca\": container with ID starting with 7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca not found: ID does not exist" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.710081 4774 scope.go:117] "RemoveContainer" 
containerID="63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.736456 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1"} err="failed to get container status \"63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1\": rpc error: code = NotFound desc = could not find container \"63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1\": container with ID starting with 63e5452b029eaf4f5a3d0a31694fee137e8d1db28e2cf077372d53fcec1bfaf1 not found: ID does not exist" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.736510 4774 scope.go:117] "RemoveContainer" containerID="7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.739359 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca"} err="failed to get container status \"7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca\": rpc error: code = NotFound desc = could not find container \"7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca\": container with ID starting with 7651ae98029193303538f9d9cacb77968e7da27c3b1590c2d35cbaac2c801aca not found: ID does not exist" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.788215 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.798636 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.801583 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.803372 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-scripts\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.803489 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.803520 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-config-data\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.803568 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4f5n\" (UniqueName: \"kubernetes.io/projected/0171c979-def5-4c7e-8551-cd40d008e88e-kube-api-access-x4f5n\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: 
I1121 14:24:39.803609 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0171c979-def5-4c7e-8551-cd40d008e88e-logs\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.803646 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.803684 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.803707 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0171c979-def5-4c7e-8551-cd40d008e88e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.804257 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0171c979-def5-4c7e-8551-cd40d008e88e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.804341 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.804861 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.805194 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0171c979-def5-4c7e-8551-cd40d008e88e-logs\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.805271 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.813792 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-config-data\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.815611 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-scripts\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.833131 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.838969 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4f5n\" (UniqueName: \"kubernetes.io/projected/0171c979-def5-4c7e-8551-cd40d008e88e-kube-api-access-x4f5n\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.843761 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.849811 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.904857 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mt57n\" (UniqueName: \"kubernetes.io/projected/dc5f9b69-3714-4aee-8d39-1618184dbb91-kube-api-access-mt57n\") pod \"dc5f9b69-3714-4aee-8d39-1618184dbb91\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.904904 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-combined-ca-bundle\") pod \"dc5f9b69-3714-4aee-8d39-1618184dbb91\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.904953 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-scripts\") pod \"dc5f9b69-3714-4aee-8d39-1618184dbb91\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.905075 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-config-data\") pod \"dc5f9b69-3714-4aee-8d39-1618184dbb91\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.905193 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5f9b69-3714-4aee-8d39-1618184dbb91-logs\") pod \"dc5f9b69-3714-4aee-8d39-1618184dbb91\" (UID: \"dc5f9b69-3714-4aee-8d39-1618184dbb91\") " Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.905479 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/aecb7435-3e87-4623-ad69-f322836314a3-logs\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.905554 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aecb7435-3e87-4623-ad69-f322836314a3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.905591 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.905668 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zj4w\" (UniqueName: \"kubernetes.io/projected/aecb7435-3e87-4623-ad69-f322836314a3-kube-api-access-6zj4w\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.905751 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.905797 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.905895 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.905955 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.907961 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.909503 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/dc5f9b69-3714-4aee-8d39-1618184dbb91-logs" (OuterVolumeSpecName: "logs") pod "dc5f9b69-3714-4aee-8d39-1618184dbb91" (UID: "dc5f9b69-3714-4aee-8d39-1618184dbb91"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.912709 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc5f9b69-3714-4aee-8d39-1618184dbb91-kube-api-access-mt57n" (OuterVolumeSpecName: "kube-api-access-mt57n") pod "dc5f9b69-3714-4aee-8d39-1618184dbb91" (UID: "dc5f9b69-3714-4aee-8d39-1618184dbb91"). InnerVolumeSpecName "kube-api-access-mt57n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.915328 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-scripts" (OuterVolumeSpecName: "scripts") pod "dc5f9b69-3714-4aee-8d39-1618184dbb91" (UID: "dc5f9b69-3714-4aee-8d39-1618184dbb91"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.923444 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.954021 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-config-data" (OuterVolumeSpecName: "config-data") pod "dc5f9b69-3714-4aee-8d39-1618184dbb91" (UID: "dc5f9b69-3714-4aee-8d39-1618184dbb91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:39 crc kubenswrapper[4774]: I1121 14:24:39.987938 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc5f9b69-3714-4aee-8d39-1618184dbb91" (UID: "dc5f9b69-3714-4aee-8d39-1618184dbb91"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013180 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013236 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013284 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aecb7435-3e87-4623-ad69-f322836314a3-logs\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013336 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aecb7435-3e87-4623-ad69-f322836314a3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013366 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013403 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zj4w\" (UniqueName: \"kubernetes.io/projected/aecb7435-3e87-4623-ad69-f322836314a3-kube-api-access-6zj4w\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013452 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013468 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013521 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dc5f9b69-3714-4aee-8d39-1618184dbb91-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013536 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mt57n\" (UniqueName: 
\"kubernetes.io/projected/dc5f9b69-3714-4aee-8d39-1618184dbb91-kube-api-access-mt57n\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013545 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013553 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.013562 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc5f9b69-3714-4aee-8d39-1618184dbb91-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.018033 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.018481 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.021028 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aecb7435-3e87-4623-ad69-f322836314a3-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.021283 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aecb7435-3e87-4623-ad69-f322836314a3-logs\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.036976 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-scripts\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.039579 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-config-data\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.046211 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " 
pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.047117 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zj4w\" (UniqueName: \"kubernetes.io/projected/aecb7435-3e87-4623-ad69-f322836314a3-kube-api-access-6zj4w\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.062501 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: W1121 14:24:40.124011 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7a5f9e1_9167_418e_8e1e_57e645d31785.slice/crio-ad6ea00c94a7c73844bf4ddf0266295efee38366f85c5ecca5db0df858b2ff39 WatchSource:0}: Error finding container ad6ea00c94a7c73844bf4ddf0266295efee38366f85c5ecca5db0df858b2ff39: Status 404 returned error can't find the container with id ad6ea00c94a7c73844bf4ddf0266295efee38366f85c5ecca5db0df858b2ff39 Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.128014 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="153403d8-28c2-468b-9583-e66b4701ab5e" path="/var/lib/kubelet/pods/153403d8-28c2-468b-9583-e66b4701ab5e/volumes" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.131322 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b08f504-86a8-4ba3-bea6-bb23f66be0c6" path="/var/lib/kubelet/pods/2b08f504-86a8-4ba3-bea6-bb23f66be0c6/volumes" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.132016 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acc36aae-c9d5-40f4-b040-abbe85571c23" path="/var/lib/kubelet/pods/acc36aae-c9d5-40f4-b040-abbe85571c23/volumes" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.133568 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5546774f69-cpnh7"] Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.164605 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.429693 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5546774f69-cpnh7" event={"ID":"d7a5f9e1-9167-418e-8e1e-57e645d31785","Type":"ContainerStarted","Data":"ad6ea00c94a7c73844bf4ddf0266295efee38366f85c5ecca5db0df858b2ff39"} Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.453648 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-8d9694746-ctlgk"] Nov 21 14:24:40 crc kubenswrapper[4774]: E1121 14:24:40.454261 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc5f9b69-3714-4aee-8d39-1618184dbb91" containerName="placement-db-sync" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.454279 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc5f9b69-3714-4aee-8d39-1618184dbb91" containerName="placement-db-sync" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.454503 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc5f9b69-3714-4aee-8d39-1618184dbb91" containerName="placement-db-sync" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.455688 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.459674 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.460153 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.471586 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-85xdf" event={"ID":"dc5f9b69-3714-4aee-8d39-1618184dbb91","Type":"ContainerDied","Data":"ff272066e9cbe6c14d12d935a63ff1c315cbd400bb5cc20fb5c92aa809189ecd"} Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.471643 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff272066e9cbe6c14d12d935a63ff1c315cbd400bb5cc20fb5c92aa809189ecd" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.471700 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-85xdf" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.476262 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8d9694746-ctlgk"] Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.528346 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-scripts\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.528415 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-combined-ca-bundle\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.528507 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-config-data\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.528541 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgqqf\" (UniqueName: \"kubernetes.io/projected/204761da-3cd3-4024-8268-2c4ade77be70-kube-api-access-kgqqf\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.528569 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-public-tls-certs\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.528599 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/204761da-3cd3-4024-8268-2c4ade77be70-logs\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.529951 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-internal-tls-certs\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.574607 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.632519 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-config-data\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " 
pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.632582 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgqqf\" (UniqueName: \"kubernetes.io/projected/204761da-3cd3-4024-8268-2c4ade77be70-kube-api-access-kgqqf\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.632620 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-public-tls-certs\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.632647 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/204761da-3cd3-4024-8268-2c4ade77be70-logs\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.632673 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-internal-tls-certs\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.632755 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-scripts\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.632795 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-combined-ca-bundle\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.636251 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/204761da-3cd3-4024-8268-2c4ade77be70-logs\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.637681 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-config-data\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.637927 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-combined-ca-bundle\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.641442 4774 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-internal-tls-certs\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.642598 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-public-tls-certs\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.651194 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-scripts\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.658751 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgqqf\" (UniqueName: \"kubernetes.io/projected/204761da-3cd3-4024-8268-2c4ade77be70-kube-api-access-kgqqf\") pod \"placement-8d9694746-ctlgk\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.791465 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.797633 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:24:40 crc kubenswrapper[4774]: W1121 14:24:40.844333 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaecb7435_3e87_4623_ad69_f322836314a3.slice/crio-c60e0e2635bc551e26a5ae76aa252b3d39377a808c32413334fadefa0ac388a9 WatchSource:0}: Error finding container c60e0e2635bc551e26a5ae76aa252b3d39377a808c32413334fadefa0ac388a9: Status 404 returned error can't find the container with id c60e0e2635bc551e26a5ae76aa252b3d39377a808c32413334fadefa0ac388a9 Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.874621 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.952717 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-config-data\") pod \"cf55e528-0d45-4c04-8f50-674d2b40625c\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.952872 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-combined-ca-bundle\") pod \"cf55e528-0d45-4c04-8f50-674d2b40625c\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.952998 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-scripts\") pod \"cf55e528-0d45-4c04-8f50-674d2b40625c\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.953059 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdm6g\" (UniqueName: \"kubernetes.io/projected/cf55e528-0d45-4c04-8f50-674d2b40625c-kube-api-access-sdm6g\") pod \"cf55e528-0d45-4c04-8f50-674d2b40625c\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.953108 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-fernet-keys\") pod \"cf55e528-0d45-4c04-8f50-674d2b40625c\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.953130 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-credential-keys\") pod \"cf55e528-0d45-4c04-8f50-674d2b40625c\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.977321 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "cf55e528-0d45-4c04-8f50-674d2b40625c" (UID: "cf55e528-0d45-4c04-8f50-674d2b40625c"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.985154 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf55e528-0d45-4c04-8f50-674d2b40625c-kube-api-access-sdm6g" (OuterVolumeSpecName: "kube-api-access-sdm6g") pod "cf55e528-0d45-4c04-8f50-674d2b40625c" (UID: "cf55e528-0d45-4c04-8f50-674d2b40625c"). InnerVolumeSpecName "kube-api-access-sdm6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:40 crc kubenswrapper[4774]: I1121 14:24:40.985298 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "cf55e528-0d45-4c04-8f50-674d2b40625c" (UID: "cf55e528-0d45-4c04-8f50-674d2b40625c"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:40.997894 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-scripts" (OuterVolumeSpecName: "scripts") pod "cf55e528-0d45-4c04-8f50-674d2b40625c" (UID: "cf55e528-0d45-4c04-8f50-674d2b40625c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.064220 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf55e528-0d45-4c04-8f50-674d2b40625c" (UID: "cf55e528-0d45-4c04-8f50-674d2b40625c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.066950 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-config-data" (OuterVolumeSpecName: "config-data") pod "cf55e528-0d45-4c04-8f50-674d2b40625c" (UID: "cf55e528-0d45-4c04-8f50-674d2b40625c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.067610 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-config-data\") pod \"cf55e528-0d45-4c04-8f50-674d2b40625c\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.067729 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-combined-ca-bundle\") pod \"cf55e528-0d45-4c04-8f50-674d2b40625c\" (UID: \"cf55e528-0d45-4c04-8f50-674d2b40625c\") " Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.068739 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.068763 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdm6g\" (UniqueName: \"kubernetes.io/projected/cf55e528-0d45-4c04-8f50-674d2b40625c-kube-api-access-sdm6g\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.068779 4774 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.068792 4774 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:41 crc kubenswrapper[4774]: W1121 14:24:41.068900 4774 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/cf55e528-0d45-4c04-8f50-674d2b40625c/volumes/kubernetes.io~secret/combined-ca-bundle Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.068913 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf55e528-0d45-4c04-8f50-674d2b40625c" (UID: "cf55e528-0d45-4c04-8f50-674d2b40625c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:41 crc kubenswrapper[4774]: W1121 14:24:41.068959 4774 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/cf55e528-0d45-4c04-8f50-674d2b40625c/volumes/kubernetes.io~secret/config-data Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.068968 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-config-data" (OuterVolumeSpecName: "config-data") pod "cf55e528-0d45-4c04-8f50-674d2b40625c" (UID: "cf55e528-0d45-4c04-8f50-674d2b40625c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.170445 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.170492 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf55e528-0d45-4c04-8f50-674d2b40625c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.477026 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8d9694746-ctlgk"] Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.496095 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5546774f69-cpnh7" event={"ID":"d7a5f9e1-9167-418e-8e1e-57e645d31785","Type":"ContainerStarted","Data":"1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28"} Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.496148 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5546774f69-cpnh7" event={"ID":"d7a5f9e1-9167-418e-8e1e-57e645d31785","Type":"ContainerStarted","Data":"eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7"} Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.496258 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.514152 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0171c979-def5-4c7e-8551-cd40d008e88e","Type":"ContainerStarted","Data":"1ae4a65c111b21a69f52bc649b89dbe7c6443a9ab8b9a9bbd35c9f4df8f8e32b"} Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.522525 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"aecb7435-3e87-4623-ad69-f322836314a3","Type":"ContainerStarted","Data":"c60e0e2635bc551e26a5ae76aa252b3d39377a808c32413334fadefa0ac388a9"} Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.544354 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tjdsz" event={"ID":"cf55e528-0d45-4c04-8f50-674d2b40625c","Type":"ContainerDied","Data":"62a569e4cf2d79c374847f235bc36cb8c6bdfa18affd15f67311499eb255a5dd"} Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.544412 4774 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="62a569e4cf2d79c374847f235bc36cb8c6bdfa18affd15f67311499eb255a5dd" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.544508 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tjdsz" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.579059 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-b76744b8b-5ws6g"] Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.579222 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5546774f69-cpnh7" podStartSLOduration=2.579195782 podStartE2EDuration="2.579195782s" podCreationTimestamp="2025-11-21 14:24:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:41.538841956 +0000 UTC m=+1272.191041215" watchObservedRunningTime="2025-11-21 14:24:41.579195782 +0000 UTC m=+1272.231395041" Nov 21 14:24:41 crc kubenswrapper[4774]: E1121 14:24:41.579711 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf55e528-0d45-4c04-8f50-674d2b40625c" containerName="keystone-bootstrap" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.579736 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf55e528-0d45-4c04-8f50-674d2b40625c" containerName="keystone-bootstrap" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.580056 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf55e528-0d45-4c04-8f50-674d2b40625c" containerName="keystone-bootstrap" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.580976 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.586877 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.586946 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.587087 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.587262 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-lxmk4" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.588235 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.588472 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.667076 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b76744b8b-5ws6g"] Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.714617 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-config-data\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.735537 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-credential-keys\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.735608 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-public-tls-certs\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.735753 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-combined-ca-bundle\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.736105 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-internal-tls-certs\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.736156 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-scripts\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.736292 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntwsc\" (UniqueName: \"kubernetes.io/projected/57cdbc4f-20e9-4189-872d-f6f3c58f7093-kube-api-access-ntwsc\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.736355 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-fernet-keys\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.845304 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-combined-ca-bundle\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.845403 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-internal-tls-certs\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.845436 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-scripts\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.845503 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntwsc\" (UniqueName: \"kubernetes.io/projected/57cdbc4f-20e9-4189-872d-f6f3c58f7093-kube-api-access-ntwsc\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.845532 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-fernet-keys\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.845640 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-config-data\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.845671 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-credential-keys\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.845695 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-public-tls-certs\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.856602 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-internal-tls-certs\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.861522 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-public-tls-certs\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.862637 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-config-data\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.866313 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-scripts\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " 
pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.866540 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-credential-keys\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.867114 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-combined-ca-bundle\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.871674 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-fernet-keys\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.880581 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntwsc\" (UniqueName: \"kubernetes.io/projected/57cdbc4f-20e9-4189-872d-f6f3c58f7093-kube-api-access-ntwsc\") pod \"keystone-b76744b8b-5ws6g\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:41 crc kubenswrapper[4774]: I1121 14:24:41.927010 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:42 crc kubenswrapper[4774]: I1121 14:24:42.567536 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0171c979-def5-4c7e-8551-cd40d008e88e","Type":"ContainerStarted","Data":"d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d"} Nov 21 14:24:42 crc kubenswrapper[4774]: I1121 14:24:42.569592 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b76744b8b-5ws6g"] Nov 21 14:24:42 crc kubenswrapper[4774]: I1121 14:24:42.570000 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8d9694746-ctlgk" event={"ID":"204761da-3cd3-4024-8268-2c4ade77be70","Type":"ContainerStarted","Data":"e1bece3865eab576d0537b2757e1996d3eb563738ed1cbdb0bbc09abd23a1ae4"} Nov 21 14:24:42 crc kubenswrapper[4774]: I1121 14:24:42.570036 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8d9694746-ctlgk" event={"ID":"204761da-3cd3-4024-8268-2c4ade77be70","Type":"ContainerStarted","Data":"2cc7c80b89b553cb23dac5a76f39501cb8a1850828a8f174e0938896be6dac43"} Nov 21 14:24:42 crc kubenswrapper[4774]: I1121 14:24:42.575862 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"aecb7435-3e87-4623-ad69-f322836314a3","Type":"ContainerStarted","Data":"9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514"} Nov 21 14:24:43 crc kubenswrapper[4774]: I1121 14:24:43.588082 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0171c979-def5-4c7e-8551-cd40d008e88e","Type":"ContainerStarted","Data":"0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc"} Nov 21 14:24:43 crc kubenswrapper[4774]: I1121 14:24:43.627231 4774 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.627203687 podStartE2EDuration="4.627203687s" podCreationTimestamp="2025-11-21 14:24:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:43.609754952 +0000 UTC m=+1274.261954231" watchObservedRunningTime="2025-11-21 14:24:43.627203687 +0000 UTC m=+1274.279402936" Nov 21 14:24:46 crc kubenswrapper[4774]: I1121 14:24:46.818547 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:24:46 crc kubenswrapper[4774]: I1121 14:24:46.900752 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bdfc8db59-wsd6r"] Nov 21 14:24:46 crc kubenswrapper[4774]: I1121 14:24:46.901160 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" podUID="e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" containerName="dnsmasq-dns" containerID="cri-o://0f1f36be6c3f4241aee49f35177b450a1aa8f641aa4a6715c401f8e36d1ce504" gracePeriod=10 Nov 21 14:24:47 crc kubenswrapper[4774]: I1121 14:24:47.630946 4774 generic.go:334] "Generic (PLEG): container finished" podID="e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" containerID="0f1f36be6c3f4241aee49f35177b450a1aa8f641aa4a6715c401f8e36d1ce504" exitCode=0 Nov 21 14:24:47 crc kubenswrapper[4774]: I1121 14:24:47.631363 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" event={"ID":"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3","Type":"ContainerDied","Data":"0f1f36be6c3f4241aee49f35177b450a1aa8f641aa4a6715c401f8e36d1ce504"} Nov 21 14:24:48 crc kubenswrapper[4774]: W1121 14:24:48.878852 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57cdbc4f_20e9_4189_872d_f6f3c58f7093.slice/crio-599f526dc1ff79383e207e6cde0b3433691bbb4e2791ef2e3034c9cdbdf138ff WatchSource:0}: Error finding container 599f526dc1ff79383e207e6cde0b3433691bbb4e2791ef2e3034c9cdbdf138ff: Status 404 returned error can't find the container with id 599f526dc1ff79383e207e6cde0b3433691bbb4e2791ef2e3034c9cdbdf138ff Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.242052 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.416438 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-ovsdbserver-nb\") pod \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.416580 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-ovsdbserver-sb\") pod \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.416741 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gx7ml\" (UniqueName: \"kubernetes.io/projected/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-kube-api-access-gx7ml\") pod \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.416804 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-config\") pod \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.416877 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-dns-svc\") pod \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\" (UID: \"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3\") " Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.442183 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-kube-api-access-gx7ml" (OuterVolumeSpecName: "kube-api-access-gx7ml") pod "e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" (UID: "e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3"). InnerVolumeSpecName "kube-api-access-gx7ml". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.496224 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" (UID: "e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.504793 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" (UID: "e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.507136 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-config" (OuterVolumeSpecName: "config") pod "e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" (UID: "e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.520723 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gx7ml\" (UniqueName: \"kubernetes.io/projected/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-kube-api-access-gx7ml\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.520770 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.520783 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.520792 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.521027 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" (UID: "e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.623214 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.670692 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-97g4j" event={"ID":"a6996afa-3f45-411b-ac41-acf012c9c45e","Type":"ContainerStarted","Data":"61597ca4b4483f6ed3cd70ca3eb4e3b66b8c2f1e1c119be77516f9b9aec25720"} Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.703576 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b84a14b-18fb-4c32-9fe8-81822e98ab6d","Type":"ContainerStarted","Data":"c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f"} Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.711481 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b76744b8b-5ws6g" event={"ID":"57cdbc4f-20e9-4189-872d-f6f3c58f7093","Type":"ContainerStarted","Data":"aea70590e231f1b48851f4ffa1e6852272819cf991813022fe7bff259b0f4d04"} Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.712045 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b76744b8b-5ws6g" event={"ID":"57cdbc4f-20e9-4189-872d-f6f3c58f7093","Type":"ContainerStarted","Data":"599f526dc1ff79383e207e6cde0b3433691bbb4e2791ef2e3034c9cdbdf138ff"} Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.713792 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.728406 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.728399 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7bdfc8db59-wsd6r" event={"ID":"e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3","Type":"ContainerDied","Data":"b137ab21c8f96f0329ab227b5b1b5a39f1682bafe3ad34e07a1cf7dc5bcff663"} Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.728479 4774 scope.go:117] "RemoveContainer" containerID="0f1f36be6c3f4241aee49f35177b450a1aa8f641aa4a6715c401f8e36d1ce504" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.747353 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-b76744b8b-5ws6g" podStartSLOduration=8.747319827 podStartE2EDuration="8.747319827s" podCreationTimestamp="2025-11-21 14:24:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:49.74605097 +0000 UTC m=+1280.398250239" watchObservedRunningTime="2025-11-21 14:24:49.747319827 +0000 UTC m=+1280.399519096" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.751094 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-97g4j" podStartSLOduration=2.327475346 podStartE2EDuration="40.751062285s" podCreationTimestamp="2025-11-21 14:24:09 +0000 UTC" firstStartedPulling="2025-11-21 14:24:10.641072337 +0000 UTC m=+1241.293271596" lastFinishedPulling="2025-11-21 14:24:49.064659276 +0000 UTC m=+1279.716858535" observedRunningTime="2025-11-21 14:24:49.697653401 +0000 UTC m=+1280.349852680" watchObservedRunningTime="2025-11-21 14:24:49.751062285 +0000 UTC m=+1280.403261544" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.763563 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8d9694746-ctlgk" event={"ID":"204761da-3cd3-4024-8268-2c4ade77be70","Type":"ContainerStarted","Data":"cc1d60dd83d00832b380eb3c950ba9940eb8e75dc9cfe60f03f0990330129de2"} Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.765337 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.765374 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.799878 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7bdfc8db59-wsd6r"] Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.818135 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7bdfc8db59-wsd6r"] Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.833194 4774 scope.go:117] "RemoveContainer" containerID="04e64de391eb167cd118a4c5f0463876c76525463e056fc9e5d1b11fa488e936" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.838936 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-8d9694746-ctlgk" podStartSLOduration=9.838914144 podStartE2EDuration="9.838914144s" podCreationTimestamp="2025-11-21 14:24:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:49.81904379 +0000 UTC m=+1280.471243059" watchObservedRunningTime="2025-11-21 14:24:49.838914144 +0000 UTC m=+1280.491113393" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.924602 4774 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.926158 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.980866 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 21 14:24:49 crc kubenswrapper[4774]: I1121 14:24:49.991991 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 21 14:24:50 crc kubenswrapper[4774]: I1121 14:24:50.113862 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" path="/var/lib/kubelet/pods/e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3/volumes" Nov 21 14:24:50 crc kubenswrapper[4774]: I1121 14:24:50.778730 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-h86r9" event={"ID":"92400548-ccdd-4e2a-9da5-3aeef0628e31","Type":"ContainerStarted","Data":"b74c59ecf9ca863931ef02c75fc057e4b9cf2d307851a4820767d23b14d83360"} Nov 21 14:24:50 crc kubenswrapper[4774]: I1121 14:24:50.781539 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"aecb7435-3e87-4623-ad69-f322836314a3","Type":"ContainerStarted","Data":"5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa"} Nov 21 14:24:50 crc kubenswrapper[4774]: I1121 14:24:50.781855 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 21 14:24:50 crc kubenswrapper[4774]: I1121 14:24:50.781941 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 21 14:24:50 crc kubenswrapper[4774]: I1121 14:24:50.799537 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-h86r9" podStartSLOduration=3.769248985 podStartE2EDuration="42.799518589s" podCreationTimestamp="2025-11-21 14:24:08 +0000 UTC" firstStartedPulling="2025-11-21 14:24:10.016775843 +0000 UTC m=+1240.668975102" lastFinishedPulling="2025-11-21 14:24:49.047045447 +0000 UTC m=+1279.699244706" observedRunningTime="2025-11-21 14:24:50.799485978 +0000 UTC m=+1281.451685237" watchObservedRunningTime="2025-11-21 14:24:50.799518589 +0000 UTC m=+1281.451717848" Nov 21 14:24:50 crc kubenswrapper[4774]: I1121 14:24:50.828694 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=11.828666511 podStartE2EDuration="11.828666511s" podCreationTimestamp="2025-11-21 14:24:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:24:50.826639573 +0000 UTC m=+1281.478838832" watchObservedRunningTime="2025-11-21 14:24:50.828666511 +0000 UTC m=+1281.480865770" Nov 21 14:24:51 crc kubenswrapper[4774]: I1121 14:24:51.414736 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:24:52 crc kubenswrapper[4774]: I1121 14:24:52.816113 4774 generic.go:334] "Generic (PLEG): container finished" podID="a6996afa-3f45-411b-ac41-acf012c9c45e" containerID="61597ca4b4483f6ed3cd70ca3eb4e3b66b8c2f1e1c119be77516f9b9aec25720" exitCode=0 Nov 21 14:24:52 crc 
kubenswrapper[4774]: I1121 14:24:52.816217 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-97g4j" event={"ID":"a6996afa-3f45-411b-ac41-acf012c9c45e","Type":"ContainerDied","Data":"61597ca4b4483f6ed3cd70ca3eb4e3b66b8c2f1e1c119be77516f9b9aec25720"} Nov 21 14:24:52 crc kubenswrapper[4774]: I1121 14:24:52.988903 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 21 14:24:52 crc kubenswrapper[4774]: I1121 14:24:52.989056 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 21 14:24:53 crc kubenswrapper[4774]: I1121 14:24:52.994389 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.277476 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.415236 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a6996afa-3f45-411b-ac41-acf012c9c45e-db-sync-config-data\") pod \"a6996afa-3f45-411b-ac41-acf012c9c45e\" (UID: \"a6996afa-3f45-411b-ac41-acf012c9c45e\") " Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.415298 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6996afa-3f45-411b-ac41-acf012c9c45e-combined-ca-bundle\") pod \"a6996afa-3f45-411b-ac41-acf012c9c45e\" (UID: \"a6996afa-3f45-411b-ac41-acf012c9c45e\") " Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.415480 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vnxx\" (UniqueName: \"kubernetes.io/projected/a6996afa-3f45-411b-ac41-acf012c9c45e-kube-api-access-5vnxx\") pod \"a6996afa-3f45-411b-ac41-acf012c9c45e\" (UID: \"a6996afa-3f45-411b-ac41-acf012c9c45e\") " Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.422688 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6996afa-3f45-411b-ac41-acf012c9c45e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a6996afa-3f45-411b-ac41-acf012c9c45e" (UID: "a6996afa-3f45-411b-ac41-acf012c9c45e"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.424455 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6996afa-3f45-411b-ac41-acf012c9c45e-kube-api-access-5vnxx" (OuterVolumeSpecName: "kube-api-access-5vnxx") pod "a6996afa-3f45-411b-ac41-acf012c9c45e" (UID: "a6996afa-3f45-411b-ac41-acf012c9c45e"). InnerVolumeSpecName "kube-api-access-5vnxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.442967 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6996afa-3f45-411b-ac41-acf012c9c45e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a6996afa-3f45-411b-ac41-acf012c9c45e" (UID: "a6996afa-3f45-411b-ac41-acf012c9c45e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.518632 4774 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a6996afa-3f45-411b-ac41-acf012c9c45e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.518698 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6996afa-3f45-411b-ac41-acf012c9c45e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.518714 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vnxx\" (UniqueName: \"kubernetes.io/projected/a6996afa-3f45-411b-ac41-acf012c9c45e-kube-api-access-5vnxx\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.872940 4774 generic.go:334] "Generic (PLEG): container finished" podID="92400548-ccdd-4e2a-9da5-3aeef0628e31" containerID="b74c59ecf9ca863931ef02c75fc057e4b9cf2d307851a4820767d23b14d83360" exitCode=0 Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.873038 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-h86r9" event={"ID":"92400548-ccdd-4e2a-9da5-3aeef0628e31","Type":"ContainerDied","Data":"b74c59ecf9ca863931ef02c75fc057e4b9cf2d307851a4820767d23b14d83360"} Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.876402 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-97g4j" event={"ID":"a6996afa-3f45-411b-ac41-acf012c9c45e","Type":"ContainerDied","Data":"6aba1d7c91bb27a267eedd066b951aaa84a590ed2f9a0f7eb3371e9429a1ad01"} Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.876455 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6aba1d7c91bb27a267eedd066b951aaa84a590ed2f9a0f7eb3371e9429a1ad01" Nov 21 14:24:56 crc kubenswrapper[4774]: I1121 14:24:56.876465 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-97g4j" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.668614 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-74459fb479-fkm77"] Nov 21 14:24:57 crc kubenswrapper[4774]: E1121 14:24:57.669151 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" containerName="dnsmasq-dns" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.669167 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" containerName="dnsmasq-dns" Nov 21 14:24:57 crc kubenswrapper[4774]: E1121 14:24:57.669191 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6996afa-3f45-411b-ac41-acf012c9c45e" containerName="barbican-db-sync" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.669197 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6996afa-3f45-411b-ac41-acf012c9c45e" containerName="barbican-db-sync" Nov 21 14:24:57 crc kubenswrapper[4774]: E1121 14:24:57.669206 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" containerName="init" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.669213 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" containerName="init" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.669392 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5f3511f-3bfe-4f7b-a3cf-adffe094c7c3" containerName="dnsmasq-dns" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.669409 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6996afa-3f45-411b-ac41-acf012c9c45e" containerName="barbican-db-sync" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.670482 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.676373 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.676708 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.677079 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-4rpzt" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.707548 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-74459fb479-fkm77"] Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.750571 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-combined-ca-bundle\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.750794 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-config-data-custom\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.750838 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98c89c8e-6557-46b4-adf8-f954dfff68b3-logs\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.750910 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-672gc\" (UniqueName: \"kubernetes.io/projected/98c89c8e-6557-46b4-adf8-f954dfff68b3-kube-api-access-672gc\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.750939 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-config-data\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.793297 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-866df86b64-5t8kn"] Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.795628 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.799977 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.831155 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-866df86b64-5t8kn"] Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.853178 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-combined-ca-bundle\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.853285 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-config-data-custom\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.853311 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98c89c8e-6557-46b4-adf8-f954dfff68b3-logs\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.853345 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-672gc\" (UniqueName: \"kubernetes.io/projected/98c89c8e-6557-46b4-adf8-f954dfff68b3-kube-api-access-672gc\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.853368 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-config-data\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.858528 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98c89c8e-6557-46b4-adf8-f954dfff68b3-logs\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.863254 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-config-data-custom\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.866888 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-combined-ca-bundle\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " 
pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.873021 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-config-data\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.896739 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b5f9dc565-tvrfc"] Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.910432 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.910599 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-672gc\" (UniqueName: \"kubernetes.io/projected/98c89c8e-6557-46b4-adf8-f954dfff68b3-kube-api-access-672gc\") pod \"barbican-worker-74459fb479-fkm77\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") " pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.925641 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b5f9dc565-tvrfc"] Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.944559 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="ceilometer-central-agent" containerID="cri-o://65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47" gracePeriod=30 Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.944923 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b84a14b-18fb-4c32-9fe8-81822e98ab6d","Type":"ContainerStarted","Data":"378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172"} Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.944995 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.945041 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="proxy-httpd" containerID="cri-o://378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172" gracePeriod=30 Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.945096 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="sg-core" containerID="cri-o://c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f" gracePeriod=30 Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.945138 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="ceilometer-notification-agent" containerID="cri-o://efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a" gracePeriod=30 Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.974072 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nm7h7\" (UniqueName: \"kubernetes.io/projected/8057ad05-b8c9-4742-a0e2-388f0a901595-kube-api-access-nm7h7\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: 
\"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.974158 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-combined-ca-bundle\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.974193 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-config-data-custom\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.974236 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8057ad05-b8c9-4742-a0e2-388f0a901595-logs\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:57 crc kubenswrapper[4774]: I1121 14:24:57.974346 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-config-data\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.007292 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-74459fb479-fkm77" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.061142 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.908049027 podStartE2EDuration="49.061107901s" podCreationTimestamp="2025-11-21 14:24:09 +0000 UTC" firstStartedPulling="2025-11-21 14:24:10.593466261 +0000 UTC m=+1241.245665520" lastFinishedPulling="2025-11-21 14:24:56.746525135 +0000 UTC m=+1287.398724394" observedRunningTime="2025-11-21 14:24:58.055758966 +0000 UTC m=+1288.707958225" watchObservedRunningTime="2025-11-21 14:24:58.061107901 +0000 UTC m=+1288.713307160" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.073881 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-f4b4b6596-tc7n9"] Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.076633 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-ovsdbserver-sb\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.076727 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8057ad05-b8c9-4742-a0e2-388f0a901595-logs\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.076757 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-config\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.076879 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4zts\" (UniqueName: \"kubernetes.io/projected/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-kube-api-access-z4zts\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.076914 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-ovsdbserver-nb\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.076950 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-dns-swift-storage-0\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.076986 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-dns-svc\") pod 
\"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.077031 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-config-data\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.077059 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nm7h7\" (UniqueName: \"kubernetes.io/projected/8057ad05-b8c9-4742-a0e2-388f0a901595-kube-api-access-nm7h7\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.077110 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-combined-ca-bundle\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.077131 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-config-data-custom\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.083766 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8057ad05-b8c9-4742-a0e2-388f0a901595-logs\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.097651 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-config-data-custom\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.108449 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-combined-ca-bundle\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.128298 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-config-data\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.129955 4774 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.151877 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-f4b4b6596-tc7n9"] Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.152325 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.155591 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nm7h7\" (UniqueName: \"kubernetes.io/projected/8057ad05-b8c9-4742-a0e2-388f0a901595-kube-api-access-nm7h7\") pod \"barbican-keystone-listener-866df86b64-5t8kn\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") " pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.198221 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-config-data-custom\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.198664 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-ovsdbserver-sb\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.198699 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-config-data\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.198754 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-config\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.199204 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4zts\" (UniqueName: \"kubernetes.io/projected/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-kube-api-access-z4zts\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.199236 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-ovsdbserver-nb\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.199274 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-dns-swift-storage-0\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " 
pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.199318 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-dns-svc\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.199348 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-combined-ca-bundle\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.199372 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13e01a2d-c480-4b53-849e-d9bd1ce28d15-logs\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.199408 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6nnj\" (UniqueName: \"kubernetes.io/projected/13e01a2d-c480-4b53-849e-d9bd1ce28d15-kube-api-access-b6nnj\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.200221 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-ovsdbserver-sb\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.200377 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-config\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.201132 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-dns-svc\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.201376 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-dns-swift-storage-0\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.202527 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-ovsdbserver-nb\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc 
kubenswrapper[4774]: I1121 14:24:58.244541 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4zts\" (UniqueName: \"kubernetes.io/projected/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-kube-api-access-z4zts\") pod \"dnsmasq-dns-b5f9dc565-tvrfc\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.304177 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-combined-ca-bundle\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.304233 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13e01a2d-c480-4b53-849e-d9bd1ce28d15-logs\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.304267 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6nnj\" (UniqueName: \"kubernetes.io/projected/13e01a2d-c480-4b53-849e-d9bd1ce28d15-kube-api-access-b6nnj\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.304306 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-config-data-custom\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.304345 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-config-data\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.306557 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13e01a2d-c480-4b53-849e-d9bd1ce28d15-logs\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.318130 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-config-data-custom\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.323104 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-config-data\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.325806 4774 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-combined-ca-bundle\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.336736 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6nnj\" (UniqueName: \"kubernetes.io/projected/13e01a2d-c480-4b53-849e-d9bd1ce28d15-kube-api-access-b6nnj\") pod \"barbican-api-f4b4b6596-tc7n9\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.359386 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.377709 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.429009 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.486777 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.612184 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-combined-ca-bundle\") pod \"92400548-ccdd-4e2a-9da5-3aeef0628e31\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.612339 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-config-data\") pod \"92400548-ccdd-4e2a-9da5-3aeef0628e31\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.612493 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-db-sync-config-data\") pod \"92400548-ccdd-4e2a-9da5-3aeef0628e31\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.612529 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-scripts\") pod \"92400548-ccdd-4e2a-9da5-3aeef0628e31\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.612584 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/92400548-ccdd-4e2a-9da5-3aeef0628e31-etc-machine-id\") pod \"92400548-ccdd-4e2a-9da5-3aeef0628e31\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.612619 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cfd7\" (UniqueName: \"kubernetes.io/projected/92400548-ccdd-4e2a-9da5-3aeef0628e31-kube-api-access-6cfd7\") pod \"92400548-ccdd-4e2a-9da5-3aeef0628e31\" (UID: \"92400548-ccdd-4e2a-9da5-3aeef0628e31\") " Nov 21 14:24:58 
crc kubenswrapper[4774]: I1121 14:24:58.621222 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/92400548-ccdd-4e2a-9da5-3aeef0628e31-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "92400548-ccdd-4e2a-9da5-3aeef0628e31" (UID: "92400548-ccdd-4e2a-9da5-3aeef0628e31"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.621260 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92400548-ccdd-4e2a-9da5-3aeef0628e31-kube-api-access-6cfd7" (OuterVolumeSpecName: "kube-api-access-6cfd7") pod "92400548-ccdd-4e2a-9da5-3aeef0628e31" (UID: "92400548-ccdd-4e2a-9da5-3aeef0628e31"). InnerVolumeSpecName "kube-api-access-6cfd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.625262 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-scripts" (OuterVolumeSpecName: "scripts") pod "92400548-ccdd-4e2a-9da5-3aeef0628e31" (UID: "92400548-ccdd-4e2a-9da5-3aeef0628e31"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.627065 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "92400548-ccdd-4e2a-9da5-3aeef0628e31" (UID: "92400548-ccdd-4e2a-9da5-3aeef0628e31"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.650194 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92400548-ccdd-4e2a-9da5-3aeef0628e31" (UID: "92400548-ccdd-4e2a-9da5-3aeef0628e31"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.706784 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-config-data" (OuterVolumeSpecName: "config-data") pod "92400548-ccdd-4e2a-9da5-3aeef0628e31" (UID: "92400548-ccdd-4e2a-9da5-3aeef0628e31"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.716016 4774 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.716057 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.716068 4774 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/92400548-ccdd-4e2a-9da5-3aeef0628e31-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.716077 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cfd7\" (UniqueName: \"kubernetes.io/projected/92400548-ccdd-4e2a-9da5-3aeef0628e31-kube-api-access-6cfd7\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.716090 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.716098 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92400548-ccdd-4e2a-9da5-3aeef0628e31-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.735926 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-74459fb479-fkm77"] Nov 21 14:24:58 crc kubenswrapper[4774]: W1121 14:24:58.744122 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98c89c8e_6557_46b4_adf8_f954dfff68b3.slice/crio-383b29ac79abfe6531b39a87f9c64791fd5e6b3324ba0b9a5a53efeae744babc WatchSource:0}: Error finding container 383b29ac79abfe6531b39a87f9c64791fd5e6b3324ba0b9a5a53efeae744babc: Status 404 returned error can't find the container with id 383b29ac79abfe6531b39a87f9c64791fd5e6b3324ba0b9a5a53efeae744babc Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.959945 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-74459fb479-fkm77" event={"ID":"98c89c8e-6557-46b4-adf8-f954dfff68b3","Type":"ContainerStarted","Data":"383b29ac79abfe6531b39a87f9c64791fd5e6b3324ba0b9a5a53efeae744babc"} Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.962683 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-h86r9" event={"ID":"92400548-ccdd-4e2a-9da5-3aeef0628e31","Type":"ContainerDied","Data":"5ce74033b17bea79b5d3b887f6a431a04c7af47ae0e2df022c210e9fc116c729"} Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.963207 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-h86r9" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.962769 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ce74033b17bea79b5d3b887f6a431a04c7af47ae0e2df022c210e9fc116c729" Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.965214 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b5f9dc565-tvrfc"] Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.969775 4774 generic.go:334] "Generic (PLEG): container finished" podID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerID="378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172" exitCode=0 Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.969816 4774 generic.go:334] "Generic (PLEG): container finished" podID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerID="c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f" exitCode=2 Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.969881 4774 generic.go:334] "Generic (PLEG): container finished" podID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerID="65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47" exitCode=0 Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.969904 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b84a14b-18fb-4c32-9fe8-81822e98ab6d","Type":"ContainerDied","Data":"378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172"} Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.969946 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b84a14b-18fb-4c32-9fe8-81822e98ab6d","Type":"ContainerDied","Data":"c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f"} Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.969959 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b84a14b-18fb-4c32-9fe8-81822e98ab6d","Type":"ContainerDied","Data":"65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47"} Nov 21 14:24:58 crc kubenswrapper[4774]: I1121 14:24:58.990192 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-866df86b64-5t8kn"] Nov 21 14:24:58 crc kubenswrapper[4774]: W1121 14:24:58.992675 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8057ad05_b8c9_4742_a0e2_388f0a901595.slice/crio-733fb25f34b24ef96aca89233cb4902b2288ab842e8dbc88d77d6eef124dc1a8 WatchSource:0}: Error finding container 733fb25f34b24ef96aca89233cb4902b2288ab842e8dbc88d77d6eef124dc1a8: Status 404 returned error can't find the container with id 733fb25f34b24ef96aca89233cb4902b2288ab842e8dbc88d77d6eef124dc1a8 Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.069491 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-f4b4b6596-tc7n9"] Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.274864 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b5f9dc565-tvrfc"] Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.320963 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 14:24:59 crc kubenswrapper[4774]: E1121 14:24:59.321513 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92400548-ccdd-4e2a-9da5-3aeef0628e31" containerName="cinder-db-sync" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 
14:24:59.321537 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="92400548-ccdd-4e2a-9da5-3aeef0628e31" containerName="cinder-db-sync" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.321763 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="92400548-ccdd-4e2a-9da5-3aeef0628e31" containerName="cinder-db-sync" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.322955 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.344011 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.344233 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.344294 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.344367 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.344461 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-4zrw2" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.371482 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7445585cd9-5rcvg"] Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.373995 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.388802 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7445585cd9-5rcvg"] Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.432577 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-scripts\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.432648 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.432682 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8nnv\" (UniqueName: \"kubernetes.io/projected/b1e4dd31-d339-43bc-95a1-c35e7c14d933-kube-api-access-r8nnv\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.432838 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b1e4dd31-d339-43bc-95a1-c35e7c14d933-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.433001 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.433054 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-config-data\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.534750 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-ovsdbserver-sb\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.534885 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-scripts\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.534924 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-dns-swift-storage-0\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.535068 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dsl6\" (UniqueName: \"kubernetes.io/projected/89dc3e22-885f-44a0-af00-ecbce936e8f0-kube-api-access-9dsl6\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.535191 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.535243 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8nnv\" (UniqueName: \"kubernetes.io/projected/b1e4dd31-d339-43bc-95a1-c35e7c14d933-kube-api-access-r8nnv\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.535304 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-config\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.535497 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/b1e4dd31-d339-43bc-95a1-c35e7c14d933-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.535562 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-ovsdbserver-nb\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.535720 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.535751 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-config-data\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.535810 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-dns-svc\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.536951 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b1e4dd31-d339-43bc-95a1-c35e7c14d933-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.544520 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-scripts\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.544964 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.549547 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.554731 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-config-data\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc 
kubenswrapper[4774]: I1121 14:24:59.573116 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8nnv\" (UniqueName: \"kubernetes.io/projected/b1e4dd31-d339-43bc-95a1-c35e7c14d933-kube-api-access-r8nnv\") pod \"cinder-scheduler-0\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.580100 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.582022 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.586457 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.589668 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.637363 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-ovsdbserver-nb\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.637838 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-dns-svc\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.638412 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-ovsdbserver-sb\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.638571 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-dns-swift-storage-0\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.638675 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dsl6\" (UniqueName: \"kubernetes.io/projected/89dc3e22-885f-44a0-af00-ecbce936e8f0-kube-api-access-9dsl6\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.638787 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-config\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.639391 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-ovsdbserver-nb\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.640133 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-ovsdbserver-sb\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.641119 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-dns-svc\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.641606 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-config\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.642290 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-dns-swift-storage-0\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.662625 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dsl6\" (UniqueName: \"kubernetes.io/projected/89dc3e22-885f-44a0-af00-ecbce936e8f0-kube-api-access-9dsl6\") pod \"dnsmasq-dns-7445585cd9-5rcvg\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.677860 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.740907 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-logs\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.742709 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-scripts\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.742750 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-config-data-custom\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.742785 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jh2hd\" (UniqueName: \"kubernetes.io/projected/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-kube-api-access-jh2hd\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.742818 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-config-data\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.743004 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.743046 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.760515 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.844684 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.845167 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-logs\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.845214 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-scripts\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.845236 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-config-data-custom\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.845265 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jh2hd\" (UniqueName: \"kubernetes.io/projected/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-kube-api-access-jh2hd\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.845294 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-config-data\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.845393 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.845496 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.847396 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-logs\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.852768 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-scripts\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " 
pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.854540 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.855487 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-config-data\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.859509 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-config-data-custom\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.871520 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jh2hd\" (UniqueName: \"kubernetes.io/projected/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-kube-api-access-jh2hd\") pod \"cinder-api-0\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.981612 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.988132 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" event={"ID":"8057ad05-b8c9-4742-a0e2-388f0a901595","Type":"ContainerStarted","Data":"733fb25f34b24ef96aca89233cb4902b2288ab842e8dbc88d77d6eef124dc1a8"} Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.991542 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f4b4b6596-tc7n9" event={"ID":"13e01a2d-c480-4b53-849e-d9bd1ce28d15","Type":"ContainerStarted","Data":"774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48"} Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.991646 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f4b4b6596-tc7n9" event={"ID":"13e01a2d-c480-4b53-849e-d9bd1ce28d15","Type":"ContainerStarted","Data":"3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f"} Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.991660 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f4b4b6596-tc7n9" event={"ID":"13e01a2d-c480-4b53-849e-d9bd1ce28d15","Type":"ContainerStarted","Data":"348a1cbf3a2dcfb003437ceb8d2bb1c441ac16d1e777f1b97e722e6c4d65a46f"} Nov 21 14:24:59 crc kubenswrapper[4774]: I1121 14:24:59.991694 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:25:00 crc kubenswrapper[4774]: I1121 14:25:00.029317 4774 generic.go:334] "Generic (PLEG): container finished" podID="18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" containerID="ff4ec490621d81eb0e39e2fa8280642d7c4bf77deb42fa724e012b6618f3c52e" exitCode=0 Nov 21 14:25:00 crc kubenswrapper[4774]: I1121 14:25:00.029564 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" 
event={"ID":"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae","Type":"ContainerDied","Data":"ff4ec490621d81eb0e39e2fa8280642d7c4bf77deb42fa724e012b6618f3c52e"} Nov 21 14:25:00 crc kubenswrapper[4774]: I1121 14:25:00.029660 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" event={"ID":"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae","Type":"ContainerStarted","Data":"2f9b1824a25960dd1f38765729f3185ebb1c01f408865e17150abc7ba4ec61b2"} Nov 21 14:25:00 crc kubenswrapper[4774]: I1121 14:25:00.050496 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-f4b4b6596-tc7n9" podStartSLOduration=2.050449589 podStartE2EDuration="2.050449589s" podCreationTimestamp="2025-11-21 14:24:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:25:00.024696404 +0000 UTC m=+1290.676895673" watchObservedRunningTime="2025-11-21 14:25:00.050449589 +0000 UTC m=+1290.702648848" Nov 21 14:25:00 crc kubenswrapper[4774]: I1121 14:25:00.171857 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:00 crc kubenswrapper[4774]: I1121 14:25:00.172310 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:00 crc kubenswrapper[4774]: I1121 14:25:00.253990 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:00 crc kubenswrapper[4774]: I1121 14:25:00.261080 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:00 crc kubenswrapper[4774]: I1121 14:25:00.272929 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 14:25:00 crc kubenswrapper[4774]: I1121 14:25:00.371275 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7445585cd9-5rcvg"] Nov 21 14:25:00 crc kubenswrapper[4774]: W1121 14:25:00.571528 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89dc3e22_885f_44a0_af00_ecbce936e8f0.slice/crio-1f456397eb426bbd92e0271e5005892d4c140c205a2f6224911f54debb55887b WatchSource:0}: Error finding container 1f456397eb426bbd92e0271e5005892d4c140c205a2f6224911f54debb55887b: Status 404 returned error can't find the container with id 1f456397eb426bbd92e0271e5005892d4c140c205a2f6224911f54debb55887b Nov 21 14:25:01 crc kubenswrapper[4774]: I1121 14:25:01.040864 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b1e4dd31-d339-43bc-95a1-c35e7c14d933","Type":"ContainerStarted","Data":"9358e22c75634b5446a2d2c3ff5d2673df955bb4f4b36acdda14db01ea329cce"} Nov 21 14:25:01 crc kubenswrapper[4774]: I1121 14:25:01.041530 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" event={"ID":"89dc3e22-885f-44a0-af00-ecbce936e8f0","Type":"ContainerStarted","Data":"1f456397eb426bbd92e0271e5005892d4c140c205a2f6224911f54debb55887b"} Nov 21 14:25:01 crc kubenswrapper[4774]: I1121 14:25:01.042788 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:25:01 crc kubenswrapper[4774]: I1121 14:25:01.043284 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:01 crc kubenswrapper[4774]: I1121 14:25:01.043575 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:01 crc kubenswrapper[4774]: I1121 14:25:01.894256 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.053924 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" event={"ID":"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae","Type":"ContainerStarted","Data":"e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776"} Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.054068 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" podUID="18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" containerName="dnsmasq-dns" containerID="cri-o://e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776" gracePeriod=10 Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.054478 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.059117 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" event={"ID":"8057ad05-b8c9-4742-a0e2-388f0a901595","Type":"ContainerStarted","Data":"1bed157d3f1b09ec22281912c29b9fe8e5b372b41ebbf607b1b08a4791141c7e"} Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.063350 4774 generic.go:334] "Generic (PLEG): container finished" podID="89dc3e22-885f-44a0-af00-ecbce936e8f0" containerID="03073443effb0b0a18d4fc85e2231f3d0ab56596686b0d287b76bcc883717466" exitCode=0 Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.063402 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" event={"ID":"89dc3e22-885f-44a0-af00-ecbce936e8f0","Type":"ContainerDied","Data":"03073443effb0b0a18d4fc85e2231f3d0ab56596686b0d287b76bcc883717466"} Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.066739 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-74459fb479-fkm77" event={"ID":"98c89c8e-6557-46b4-adf8-f954dfff68b3","Type":"ContainerStarted","Data":"571c60de19b673d0a2cf6499c4d0ad765e15a4171ae78a3dcb8552bb3605e8e6"} Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.069035 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2","Type":"ContainerStarted","Data":"f5abe4b6e11635212e5f7ae68eadb46313737d58937e7af55ba15dbc727e7379"} Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.092251 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" podStartSLOduration=5.092225362 podStartE2EDuration="5.092225362s" podCreationTimestamp="2025-11-21 14:24:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:25:02.08006807 +0000 UTC m=+1292.732267349" watchObservedRunningTime="2025-11-21 14:25:02.092225362 +0000 UTC m=+1292.744424611" Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.752921 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.952375 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4zts\" (UniqueName: \"kubernetes.io/projected/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-kube-api-access-z4zts\") pod \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.952566 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-config\") pod \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.952657 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-dns-svc\") pod \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.952744 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-ovsdbserver-sb\") pod \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.952762 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-dns-swift-storage-0\") pod \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.952779 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-ovsdbserver-nb\") pod \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\" (UID: \"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae\") " Nov 21 14:25:02 crc kubenswrapper[4774]: I1121 14:25:02.992185 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-kube-api-access-z4zts" (OuterVolumeSpecName: "kube-api-access-z4zts") pod "18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" (UID: "18e75eaa-b138-44c5-b2cd-2a19ed1af0ae"). InnerVolumeSpecName "kube-api-access-z4zts". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.026416 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.066967 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4zts\" (UniqueName: \"kubernetes.io/projected/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-kube-api-access-z4zts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.074657 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" (UID: "18e75eaa-b138-44c5-b2cd-2a19ed1af0ae"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.084254 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" (UID: "18e75eaa-b138-44c5-b2cd-2a19ed1af0ae"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.114736 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-config" (OuterVolumeSpecName: "config") pod "18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" (UID: "18e75eaa-b138-44c5-b2cd-2a19ed1af0ae"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.122383 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b1e4dd31-d339-43bc-95a1-c35e7c14d933","Type":"ContainerStarted","Data":"b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44"} Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.127742 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" (UID: "18e75eaa-b138-44c5-b2cd-2a19ed1af0ae"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.148792 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2","Type":"ContainerStarted","Data":"a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f"} Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.168229 4774 generic.go:334] "Generic (PLEG): container finished" podID="18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" containerID="e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776" exitCode=0 Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.168612 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.168473 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" event={"ID":"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae","Type":"ContainerDied","Data":"e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776"} Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.168971 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b5f9dc565-tvrfc" event={"ID":"18e75eaa-b138-44c5-b2cd-2a19ed1af0ae","Type":"ContainerDied","Data":"2f9b1824a25960dd1f38765729f3185ebb1c01f408865e17150abc7ba4ec61b2"} Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.169007 4774 scope.go:117] "RemoveContainer" containerID="e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.177552 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.177603 4774 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.177621 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.177634 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.188040 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" (UID: "18e75eaa-b138-44c5-b2cd-2a19ed1af0ae"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.198076 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" event={"ID":"8057ad05-b8c9-4742-a0e2-388f0a901595","Type":"ContainerStarted","Data":"72732cc3c72816545f8f6bd38e3894a25c914501104260c3cdc2219287bc3e97"} Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.201101 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" event={"ID":"89dc3e22-885f-44a0-af00-ecbce936e8f0","Type":"ContainerStarted","Data":"5311aea5b99af3838b2c7927c5d0fe8e8d4e1aa5679eb1dee621e86bd7201b81"} Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.201286 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.222086 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" podStartSLOduration=3.737538667 podStartE2EDuration="6.222055508s" podCreationTimestamp="2025-11-21 14:24:57 +0000 UTC" firstStartedPulling="2025-11-21 14:24:58.995141257 +0000 UTC m=+1289.647340516" lastFinishedPulling="2025-11-21 14:25:01.479658098 +0000 UTC m=+1292.131857357" observedRunningTime="2025-11-21 14:25:03.218937667 +0000 UTC m=+1293.871136946" watchObservedRunningTime="2025-11-21 14:25:03.222055508 +0000 UTC m=+1293.874254777" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.222862 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-74459fb479-fkm77" event={"ID":"98c89c8e-6557-46b4-adf8-f954dfff68b3","Type":"ContainerStarted","Data":"123168316f49f1c892f63242e19f929cf760d9f3f3bdcc32a34469541b54b183"} Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.246637 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" podStartSLOduration=4.246608457 podStartE2EDuration="4.246608457s" podCreationTimestamp="2025-11-21 14:24:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:25:03.245497125 +0000 UTC m=+1293.897696384" watchObservedRunningTime="2025-11-21 14:25:03.246608457 +0000 UTC m=+1293.898807706" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.247764 4774 scope.go:117] "RemoveContainer" containerID="ff4ec490621d81eb0e39e2fa8280642d7c4bf77deb42fa724e012b6618f3c52e" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.282570 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.290026 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-74459fb479-fkm77" podStartSLOduration=3.559743419 podStartE2EDuration="6.289976021s" podCreationTimestamp="2025-11-21 14:24:57 +0000 UTC" firstStartedPulling="2025-11-21 14:24:58.749559289 +0000 UTC m=+1289.401758548" lastFinishedPulling="2025-11-21 14:25:01.479791891 +0000 UTC m=+1292.131991150" observedRunningTime="2025-11-21 14:25:03.278031996 +0000 UTC m=+1293.930231255" watchObservedRunningTime="2025-11-21 14:25:03.289976021 +0000 UTC m=+1293.942175290" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.346288 4774 
scope.go:117] "RemoveContainer" containerID="e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776" Nov 21 14:25:03 crc kubenswrapper[4774]: E1121 14:25:03.348425 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776\": container with ID starting with e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776 not found: ID does not exist" containerID="e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.348467 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776"} err="failed to get container status \"e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776\": rpc error: code = NotFound desc = could not find container \"e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776\": container with ID starting with e10be800237e214df0603cf5035c00a6193d9e67d3142e3704b968468b2ef776 not found: ID does not exist" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.348500 4774 scope.go:117] "RemoveContainer" containerID="ff4ec490621d81eb0e39e2fa8280642d7c4bf77deb42fa724e012b6618f3c52e" Nov 21 14:25:03 crc kubenswrapper[4774]: E1121 14:25:03.349518 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff4ec490621d81eb0e39e2fa8280642d7c4bf77deb42fa724e012b6618f3c52e\": container with ID starting with ff4ec490621d81eb0e39e2fa8280642d7c4bf77deb42fa724e012b6618f3c52e not found: ID does not exist" containerID="ff4ec490621d81eb0e39e2fa8280642d7c4bf77deb42fa724e012b6618f3c52e" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.349594 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff4ec490621d81eb0e39e2fa8280642d7c4bf77deb42fa724e012b6618f3c52e"} err="failed to get container status \"ff4ec490621d81eb0e39e2fa8280642d7c4bf77deb42fa724e012b6618f3c52e\": rpc error: code = NotFound desc = could not find container \"ff4ec490621d81eb0e39e2fa8280642d7c4bf77deb42fa724e012b6618f3c52e\": container with ID starting with ff4ec490621d81eb0e39e2fa8280642d7c4bf77deb42fa724e012b6618f3c52e not found: ID does not exist" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.523936 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b5f9dc565-tvrfc"] Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.540896 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b5f9dc565-tvrfc"] Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.852168 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.898272 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-config-data\") pod \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.898333 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-scripts\") pod \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.898501 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cw5xq\" (UniqueName: \"kubernetes.io/projected/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-kube-api-access-cw5xq\") pod \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.898570 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-sg-core-conf-yaml\") pod \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.898639 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-log-httpd\") pod \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.898673 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-run-httpd\") pod \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.898756 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-combined-ca-bundle\") pod \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\" (UID: \"7b84a14b-18fb-4c32-9fe8-81822e98ab6d\") " Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.901060 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7b84a14b-18fb-4c32-9fe8-81822e98ab6d" (UID: "7b84a14b-18fb-4c32-9fe8-81822e98ab6d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.901340 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7b84a14b-18fb-4c32-9fe8-81822e98ab6d" (UID: "7b84a14b-18fb-4c32-9fe8-81822e98ab6d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.916217 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-scripts" (OuterVolumeSpecName: "scripts") pod "7b84a14b-18fb-4c32-9fe8-81822e98ab6d" (UID: "7b84a14b-18fb-4c32-9fe8-81822e98ab6d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.935234 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-kube-api-access-cw5xq" (OuterVolumeSpecName: "kube-api-access-cw5xq") pod "7b84a14b-18fb-4c32-9fe8-81822e98ab6d" (UID: "7b84a14b-18fb-4c32-9fe8-81822e98ab6d"). InnerVolumeSpecName "kube-api-access-cw5xq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.964889 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7b84a14b-18fb-4c32-9fe8-81822e98ab6d" (UID: "7b84a14b-18fb-4c32-9fe8-81822e98ab6d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.976688 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:03 crc kubenswrapper[4774]: I1121 14:25:03.976818 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.001998 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.002037 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.002050 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.002064 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cw5xq\" (UniqueName: \"kubernetes.io/projected/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-kube-api-access-cw5xq\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.002077 4774 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.035979 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-config-data" (OuterVolumeSpecName: "config-data") pod "7b84a14b-18fb-4c32-9fe8-81822e98ab6d" (UID: "7b84a14b-18fb-4c32-9fe8-81822e98ab6d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.049672 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.103516 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.132311 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" path="/var/lib/kubelet/pods/18e75eaa-b138-44c5-b2cd-2a19ed1af0ae/volumes" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.133610 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b84a14b-18fb-4c32-9fe8-81822e98ab6d" (UID: "7b84a14b-18fb-4c32-9fe8-81822e98ab6d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.207490 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b84a14b-18fb-4c32-9fe8-81822e98ab6d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.263528 4774 generic.go:334] "Generic (PLEG): container finished" podID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerID="efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a" exitCode=0 Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.263681 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b84a14b-18fb-4c32-9fe8-81822e98ab6d","Type":"ContainerDied","Data":"efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a"} Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.263734 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7b84a14b-18fb-4c32-9fe8-81822e98ab6d","Type":"ContainerDied","Data":"393aec3156581563b37a77fe03b1c7e16daf1c74ca9252dfd144189fa6a586c4"} Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.263761 4774 scope.go:117] "RemoveContainer" containerID="378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.263992 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.275968 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b1e4dd31-d339-43bc-95a1-c35e7c14d933","Type":"ContainerStarted","Data":"9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf"} Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.290680 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2","Type":"ContainerStarted","Data":"a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0"} Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.290908 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" containerName="cinder-api-log" containerID="cri-o://a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f" gracePeriod=30 Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.291075 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.291124 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" containerName="cinder-api" containerID="cri-o://a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0" gracePeriod=30 Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.292460 4774 scope.go:117] "RemoveContainer" containerID="c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.357544 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.357517386 podStartE2EDuration="5.357517386s" podCreationTimestamp="2025-11-21 14:24:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:25:04.340488954 +0000 UTC m=+1294.992688203" watchObservedRunningTime="2025-11-21 14:25:04.357517386 +0000 UTC m=+1295.009716645" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.358896 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.926694932 podStartE2EDuration="5.358883156s" podCreationTimestamp="2025-11-21 14:24:59 +0000 UTC" firstStartedPulling="2025-11-21 14:25:00.599367714 +0000 UTC m=+1291.251566973" lastFinishedPulling="2025-11-21 14:25:02.031555948 +0000 UTC m=+1292.683755197" observedRunningTime="2025-11-21 14:25:04.310270241 +0000 UTC m=+1294.962469500" watchObservedRunningTime="2025-11-21 14:25:04.358883156 +0000 UTC m=+1295.011082405" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.362796 4774 scope.go:117] "RemoveContainer" containerID="efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.386948 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.401141 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.455622 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:04 crc kubenswrapper[4774]: E1121 14:25:04.456152 4774 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="sg-core" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.456168 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="sg-core" Nov 21 14:25:04 crc kubenswrapper[4774]: E1121 14:25:04.456184 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="ceilometer-central-agent" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.456190 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="ceilometer-central-agent" Nov 21 14:25:04 crc kubenswrapper[4774]: E1121 14:25:04.456197 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="proxy-httpd" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.456207 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="proxy-httpd" Nov 21 14:25:04 crc kubenswrapper[4774]: E1121 14:25:04.456219 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="ceilometer-notification-agent" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.456227 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="ceilometer-notification-agent" Nov 21 14:25:04 crc kubenswrapper[4774]: E1121 14:25:04.456239 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" containerName="dnsmasq-dns" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.456247 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" containerName="dnsmasq-dns" Nov 21 14:25:04 crc kubenswrapper[4774]: E1121 14:25:04.456271 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" containerName="init" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.456277 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" containerName="init" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.456524 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="ceilometer-notification-agent" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.456545 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="ceilometer-central-agent" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.456553 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="18e75eaa-b138-44c5-b2cd-2a19ed1af0ae" containerName="dnsmasq-dns" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.464060 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="proxy-httpd" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.464094 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" containerName="sg-core" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.474926 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.475082 4774 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.490499 4774 scope.go:117] "RemoveContainer" containerID="65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.490877 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.491142 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.563899 4774 scope.go:117] "RemoveContainer" containerID="378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172" Nov 21 14:25:04 crc kubenswrapper[4774]: E1121 14:25:04.565870 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172\": container with ID starting with 378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172 not found: ID does not exist" containerID="378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.565938 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172"} err="failed to get container status \"378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172\": rpc error: code = NotFound desc = could not find container \"378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172\": container with ID starting with 378b357d3dbee62e232a310ddd97a1de48d8261df5361db37b74c6145882a172 not found: ID does not exist" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.565987 4774 scope.go:117] "RemoveContainer" containerID="c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f" Nov 21 14:25:04 crc kubenswrapper[4774]: E1121 14:25:04.570107 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f\": container with ID starting with c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f not found: ID does not exist" containerID="c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.570187 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f"} err="failed to get container status \"c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f\": rpc error: code = NotFound desc = could not find container \"c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f\": container with ID starting with c93268fe8fa21f7e07bb5e21d6b9049ccdddceea62981b6bc03fb721b551568f not found: ID does not exist" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.570224 4774 scope.go:117] "RemoveContainer" containerID="efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a" Nov 21 14:25:04 crc kubenswrapper[4774]: E1121 14:25:04.571152 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a\": container with ID starting with 
efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a not found: ID does not exist" containerID="efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.571210 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a"} err="failed to get container status \"efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a\": rpc error: code = NotFound desc = could not find container \"efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a\": container with ID starting with efbc988d10842a34994983053c28f606163ed63bbfe62d1065f411d9f2a3675a not found: ID does not exist" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.571254 4774 scope.go:117] "RemoveContainer" containerID="65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47" Nov 21 14:25:04 crc kubenswrapper[4774]: E1121 14:25:04.572700 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47\": container with ID starting with 65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47 not found: ID does not exist" containerID="65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.572730 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47"} err="failed to get container status \"65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47\": rpc error: code = NotFound desc = could not find container \"65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47\": container with ID starting with 65d215d6bfdb4dd88c1a1ddd00e1d288a67631c7b0b044032c693544534e3c47 not found: ID does not exist" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.633068 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqjzh\" (UniqueName: \"kubernetes.io/projected/7f574865-7b2e-40c0-8e65-a554ca66b76d-kube-api-access-rqjzh\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.633135 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-config-data\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.633242 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f574865-7b2e-40c0-8e65-a554ca66b76d-log-httpd\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.633347 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-scripts\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.633407 4774 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.633486 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f574865-7b2e-40c0-8e65-a554ca66b76d-run-httpd\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.633601 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.678899 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.735715 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-scripts\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.735858 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.735915 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f574865-7b2e-40c0-8e65-a554ca66b76d-run-httpd\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.736012 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.736063 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqjzh\" (UniqueName: \"kubernetes.io/projected/7f574865-7b2e-40c0-8e65-a554ca66b76d-kube-api-access-rqjzh\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.736094 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-config-data\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.736184 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/7f574865-7b2e-40c0-8e65-a554ca66b76d-log-httpd\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.736776 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f574865-7b2e-40c0-8e65-a554ca66b76d-log-httpd\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.736832 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f574865-7b2e-40c0-8e65-a554ca66b76d-run-httpd\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.745622 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-config-data\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.746222 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.752685 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.764495 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-scripts\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.776503 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqjzh\" (UniqueName: \"kubernetes.io/projected/7f574865-7b2e-40c0-8e65-a554ca66b76d-kube-api-access-rqjzh\") pod \"ceilometer-0\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " pod="openstack/ceilometer-0" Nov 21 14:25:04 crc kubenswrapper[4774]: I1121 14:25:04.825441 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.066926 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-576b48cd9b-wr2q7"] Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.085546 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.092200 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.092260 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.163036 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-576b48cd9b-wr2q7"] Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.271165 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-config-data-custom\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.271227 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7xt4\" (UniqueName: \"kubernetes.io/projected/1718aee5-94ce-4682-aa62-28843ff1e2ef-kube-api-access-b7xt4\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.271248 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-combined-ca-bundle\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.271492 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1718aee5-94ce-4682-aa62-28843ff1e2ef-logs\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.271605 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-internal-tls-certs\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.271800 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-public-tls-certs\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.271893 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-config-data\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.316465 4774 generic.go:334] "Generic (PLEG): 
container finished" podID="c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" containerID="a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f" exitCode=143 Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.316551 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2","Type":"ContainerDied","Data":"a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f"} Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.374053 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1718aee5-94ce-4682-aa62-28843ff1e2ef-logs\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.374118 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-internal-tls-certs\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.374206 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-public-tls-certs\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.374237 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-config-data\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.374324 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-config-data-custom\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.374348 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7xt4\" (UniqueName: \"kubernetes.io/projected/1718aee5-94ce-4682-aa62-28843ff1e2ef-kube-api-access-b7xt4\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.374367 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-combined-ca-bundle\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.375765 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1718aee5-94ce-4682-aa62-28843ff1e2ef-logs\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" 
Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.383052 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-combined-ca-bundle\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.385095 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-internal-tls-certs\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.386700 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-public-tls-certs\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.390235 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-config-data-custom\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.391618 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-config-data\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.417679 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7xt4\" (UniqueName: \"kubernetes.io/projected/1718aee5-94ce-4682-aa62-28843ff1e2ef-kube-api-access-b7xt4\") pod \"barbican-api-576b48cd9b-wr2q7\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.444990 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.646292 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:05 crc kubenswrapper[4774]: I1121 14:25:05.964005 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-576b48cd9b-wr2q7"] Nov 21 14:25:05 crc kubenswrapper[4774]: W1121 14:25:05.969683 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1718aee5_94ce_4682_aa62_28843ff1e2ef.slice/crio-8f073fc66f16b8911daaf68519635469730b95292fc8f8051fe20535c85a2b4c WatchSource:0}: Error finding container 8f073fc66f16b8911daaf68519635469730b95292fc8f8051fe20535c85a2b4c: Status 404 returned error can't find the container with id 8f073fc66f16b8911daaf68519635469730b95292fc8f8051fe20535c85a2b4c Nov 21 14:25:06 crc kubenswrapper[4774]: I1121 14:25:06.104833 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b84a14b-18fb-4c32-9fe8-81822e98ab6d" path="/var/lib/kubelet/pods/7b84a14b-18fb-4c32-9fe8-81822e98ab6d/volumes" Nov 21 14:25:06 crc kubenswrapper[4774]: I1121 14:25:06.330192 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f574865-7b2e-40c0-8e65-a554ca66b76d","Type":"ContainerStarted","Data":"42c746973c1c44c63502c7a509116387c1c6c6a6c0b8d19c46fa5191ca7403d3"} Nov 21 14:25:06 crc kubenswrapper[4774]: I1121 14:25:06.332510 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-576b48cd9b-wr2q7" event={"ID":"1718aee5-94ce-4682-aa62-28843ff1e2ef","Type":"ContainerStarted","Data":"95fb4afd0377bbc0f0df432194dc5a7490303bfd572fe17af159b5bcfaffa8dd"} Nov 21 14:25:06 crc kubenswrapper[4774]: I1121 14:25:06.332544 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-576b48cd9b-wr2q7" event={"ID":"1718aee5-94ce-4682-aa62-28843ff1e2ef","Type":"ContainerStarted","Data":"8f073fc66f16b8911daaf68519635469730b95292fc8f8051fe20535c85a2b4c"} Nov 21 14:25:06 crc kubenswrapper[4774]: I1121 14:25:06.886839 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:25:07 crc kubenswrapper[4774]: I1121 14:25:07.346141 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f574865-7b2e-40c0-8e65-a554ca66b76d","Type":"ContainerStarted","Data":"6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a"} Nov 21 14:25:07 crc kubenswrapper[4774]: I1121 14:25:07.346578 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f574865-7b2e-40c0-8e65-a554ca66b76d","Type":"ContainerStarted","Data":"fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21"} Nov 21 14:25:07 crc kubenswrapper[4774]: I1121 14:25:07.348100 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-576b48cd9b-wr2q7" event={"ID":"1718aee5-94ce-4682-aa62-28843ff1e2ef","Type":"ContainerStarted","Data":"1a0a15cfd145eaf76485365a7148d16577e73bc0add7da74f8b15ec9b79a5303"} Nov 21 14:25:07 crc kubenswrapper[4774]: I1121 14:25:07.348311 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:07 crc kubenswrapper[4774]: I1121 14:25:07.387581 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/barbican-api-576b48cd9b-wr2q7" podStartSLOduration=2.387556629 podStartE2EDuration="2.387556629s" podCreationTimestamp="2025-11-21 14:25:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:25:07.3720266 +0000 UTC m=+1298.024225869" watchObservedRunningTime="2025-11-21 14:25:07.387556629 +0000 UTC m=+1298.039755888" Nov 21 14:25:08 crc kubenswrapper[4774]: I1121 14:25:08.359398 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f574865-7b2e-40c0-8e65-a554ca66b76d","Type":"ContainerStarted","Data":"aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397"} Nov 21 14:25:08 crc kubenswrapper[4774]: I1121 14:25:08.360159 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:09 crc kubenswrapper[4774]: I1121 14:25:09.372204 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f574865-7b2e-40c0-8e65-a554ca66b76d","Type":"ContainerStarted","Data":"c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2"} Nov 21 14:25:09 crc kubenswrapper[4774]: I1121 14:25:09.397546 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.342106714 podStartE2EDuration="5.397522156s" podCreationTimestamp="2025-11-21 14:25:04 +0000 UTC" firstStartedPulling="2025-11-21 14:25:05.670393292 +0000 UTC m=+1296.322592551" lastFinishedPulling="2025-11-21 14:25:08.725808734 +0000 UTC m=+1299.378007993" observedRunningTime="2025-11-21 14:25:09.394375705 +0000 UTC m=+1300.046574984" watchObservedRunningTime="2025-11-21 14:25:09.397522156 +0000 UTC m=+1300.049721415" Nov 21 14:25:09 crc kubenswrapper[4774]: I1121 14:25:09.526346 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:25:09 crc kubenswrapper[4774]: I1121 14:25:09.611262 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5ffc8bc4bd-l7vz6"] Nov 21 14:25:09 crc kubenswrapper[4774]: I1121 14:25:09.611540 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5ffc8bc4bd-l7vz6" podUID="f5ea34fd-2d79-4102-bf44-99ba443fc794" containerName="neutron-api" containerID="cri-o://29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb" gracePeriod=30 Nov 21 14:25:09 crc kubenswrapper[4774]: I1121 14:25:09.611710 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5ffc8bc4bd-l7vz6" podUID="f5ea34fd-2d79-4102-bf44-99ba443fc794" containerName="neutron-httpd" containerID="cri-o://a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad" gracePeriod=30 Nov 21 14:25:09 crc kubenswrapper[4774]: I1121 14:25:09.770051 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:25:09 crc kubenswrapper[4774]: I1121 14:25:09.860209 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-797dcf9445-xzkbz"] Nov 21 14:25:09 crc kubenswrapper[4774]: I1121 14:25:09.860905 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" podUID="5c606266-0467-4aeb-85ae-10f0643e09d2" containerName="dnsmasq-dns" 
containerID="cri-o://3cf3b06170c900cbdf012cea2f74d24d307cde15ee42b6765918d613b43ccfe7" gracePeriod=10 Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.087886 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 21 14:25:10 crc kubenswrapper[4774]: E1121 14:25:10.095992 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c606266_0467_4aeb_85ae_10f0643e09d2.slice/crio-3cf3b06170c900cbdf012cea2f74d24d307cde15ee42b6765918d613b43ccfe7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c606266_0467_4aeb_85ae_10f0643e09d2.slice/crio-conmon-3cf3b06170c900cbdf012cea2f74d24d307cde15ee42b6765918d613b43ccfe7.scope\": RecentStats: unable to find data in memory cache]" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.139871 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.167863 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.226489 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.418969 4774 generic.go:334] "Generic (PLEG): container finished" podID="5c606266-0467-4aeb-85ae-10f0643e09d2" containerID="3cf3b06170c900cbdf012cea2f74d24d307cde15ee42b6765918d613b43ccfe7" exitCode=0 Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.419499 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" event={"ID":"5c606266-0467-4aeb-85ae-10f0643e09d2","Type":"ContainerDied","Data":"3cf3b06170c900cbdf012cea2f74d24d307cde15ee42b6765918d613b43ccfe7"} Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.440193 4774 generic.go:334] "Generic (PLEG): container finished" podID="f5ea34fd-2d79-4102-bf44-99ba443fc794" containerID="a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad" exitCode=0 Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.440944 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ffc8bc4bd-l7vz6" event={"ID":"f5ea34fd-2d79-4102-bf44-99ba443fc794","Type":"ContainerDied","Data":"a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad"} Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.442699 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="b1e4dd31-d339-43bc-95a1-c35e7c14d933" containerName="cinder-scheduler" containerID="cri-o://b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44" gracePeriod=30 Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.443020 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.443398 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="b1e4dd31-d339-43bc-95a1-c35e7c14d933" containerName="probe" containerID="cri-o://9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf" gracePeriod=30 Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.592314 4774 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.728696 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-dns-svc\") pod \"5c606266-0467-4aeb-85ae-10f0643e09d2\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.728926 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-ovsdbserver-sb\") pod \"5c606266-0467-4aeb-85ae-10f0643e09d2\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.729008 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-dns-swift-storage-0\") pod \"5c606266-0467-4aeb-85ae-10f0643e09d2\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.729031 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-config\") pod \"5c606266-0467-4aeb-85ae-10f0643e09d2\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.729054 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmzqr\" (UniqueName: \"kubernetes.io/projected/5c606266-0467-4aeb-85ae-10f0643e09d2-kube-api-access-nmzqr\") pod \"5c606266-0467-4aeb-85ae-10f0643e09d2\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.729156 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-ovsdbserver-nb\") pod \"5c606266-0467-4aeb-85ae-10f0643e09d2\" (UID: \"5c606266-0467-4aeb-85ae-10f0643e09d2\") " Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.757467 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c606266-0467-4aeb-85ae-10f0643e09d2-kube-api-access-nmzqr" (OuterVolumeSpecName: "kube-api-access-nmzqr") pod "5c606266-0467-4aeb-85ae-10f0643e09d2" (UID: "5c606266-0467-4aeb-85ae-10f0643e09d2"). InnerVolumeSpecName "kube-api-access-nmzqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.813871 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.831616 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmzqr\" (UniqueName: \"kubernetes.io/projected/5c606266-0467-4aeb-85ae-10f0643e09d2-kube-api-access-nmzqr\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.841148 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5c606266-0467-4aeb-85ae-10f0643e09d2" (UID: "5c606266-0467-4aeb-85ae-10f0643e09d2"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.888342 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5c606266-0467-4aeb-85ae-10f0643e09d2" (UID: "5c606266-0467-4aeb-85ae-10f0643e09d2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.916488 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5c606266-0467-4aeb-85ae-10f0643e09d2" (UID: "5c606266-0467-4aeb-85ae-10f0643e09d2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.928582 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5c606266-0467-4aeb-85ae-10f0643e09d2" (UID: "5c606266-0467-4aeb-85ae-10f0643e09d2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.934953 4774 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.935169 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.936093 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:10 crc kubenswrapper[4774]: I1121 14:25:10.936172 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:11 crc kubenswrapper[4774]: I1121 14:25:11.027512 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-config" (OuterVolumeSpecName: "config") pod "5c606266-0467-4aeb-85ae-10f0643e09d2" (UID: "5c606266-0467-4aeb-85ae-10f0643e09d2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:11 crc kubenswrapper[4774]: I1121 14:25:11.044540 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c606266-0467-4aeb-85ae-10f0643e09d2-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:11 crc kubenswrapper[4774]: I1121 14:25:11.455136 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" event={"ID":"5c606266-0467-4aeb-85ae-10f0643e09d2","Type":"ContainerDied","Data":"cbb305b74e1df5f109fc9aefef4c9f9af74f9c2b8ee629b775268ea171deec40"} Nov 21 14:25:11 crc kubenswrapper[4774]: I1121 14:25:11.455217 4774 scope.go:117] "RemoveContainer" containerID="3cf3b06170c900cbdf012cea2f74d24d307cde15ee42b6765918d613b43ccfe7" Nov 21 14:25:11 crc kubenswrapper[4774]: I1121 14:25:11.455327 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-797dcf9445-xzkbz" Nov 21 14:25:11 crc kubenswrapper[4774]: I1121 14:25:11.472570 4774 generic.go:334] "Generic (PLEG): container finished" podID="b1e4dd31-d339-43bc-95a1-c35e7c14d933" containerID="9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf" exitCode=0 Nov 21 14:25:11 crc kubenswrapper[4774]: I1121 14:25:11.474946 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b1e4dd31-d339-43bc-95a1-c35e7c14d933","Type":"ContainerDied","Data":"9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf"} Nov 21 14:25:11 crc kubenswrapper[4774]: I1121 14:25:11.492289 4774 scope.go:117] "RemoveContainer" containerID="1098b44e16a7dea1fd692b56bb336294c0c6ddbe1b030abb2d8ed9a0f0299526" Nov 21 14:25:11 crc kubenswrapper[4774]: I1121 14:25:11.520353 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-797dcf9445-xzkbz"] Nov 21 14:25:11 crc kubenswrapper[4774]: I1121 14:25:11.534309 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-797dcf9445-xzkbz"] Nov 21 14:25:12 crc kubenswrapper[4774]: I1121 14:25:12.105718 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c606266-0467-4aeb-85ae-10f0643e09d2" path="/var/lib/kubelet/pods/5c606266-0467-4aeb-85ae-10f0643e09d2/volumes" Nov 21 14:25:12 crc kubenswrapper[4774]: I1121 14:25:12.954468 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 21 14:25:14 crc kubenswrapper[4774]: I1121 14:25:14.316137 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.128717 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.180739 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-scripts\") pod \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.180836 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-config-data\") pod \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.180910 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-combined-ca-bundle\") pod \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.181026 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8nnv\" (UniqueName: \"kubernetes.io/projected/b1e4dd31-d339-43bc-95a1-c35e7c14d933-kube-api-access-r8nnv\") pod \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.181201 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b1e4dd31-d339-43bc-95a1-c35e7c14d933-etc-machine-id\") pod \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.181256 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-config-data-custom\") pod \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\" (UID: \"b1e4dd31-d339-43bc-95a1-c35e7c14d933\") " Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.204359 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b1e4dd31-d339-43bc-95a1-c35e7c14d933-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b1e4dd31-d339-43bc-95a1-c35e7c14d933" (UID: "b1e4dd31-d339-43bc-95a1-c35e7c14d933"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.218255 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b1e4dd31-d339-43bc-95a1-c35e7c14d933" (UID: "b1e4dd31-d339-43bc-95a1-c35e7c14d933"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.224056 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1e4dd31-d339-43bc-95a1-c35e7c14d933-kube-api-access-r8nnv" (OuterVolumeSpecName: "kube-api-access-r8nnv") pod "b1e4dd31-d339-43bc-95a1-c35e7c14d933" (UID: "b1e4dd31-d339-43bc-95a1-c35e7c14d933"). InnerVolumeSpecName "kube-api-access-r8nnv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.238378 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-scripts" (OuterVolumeSpecName: "scripts") pod "b1e4dd31-d339-43bc-95a1-c35e7c14d933" (UID: "b1e4dd31-d339-43bc-95a1-c35e7c14d933"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.253325 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1e4dd31-d339-43bc-95a1-c35e7c14d933" (UID: "b1e4dd31-d339-43bc-95a1-c35e7c14d933"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.291927 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.291985 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.292003 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.292018 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8nnv\" (UniqueName: \"kubernetes.io/projected/b1e4dd31-d339-43bc-95a1-c35e7c14d933-kube-api-access-r8nnv\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.292035 4774 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b1e4dd31-d339-43bc-95a1-c35e7c14d933-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.349632 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.432149 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-config-data" (OuterVolumeSpecName: "config-data") pod "b1e4dd31-d339-43bc-95a1-c35e7c14d933" (UID: "b1e4dd31-d339-43bc-95a1-c35e7c14d933"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.498331 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-combined-ca-bundle\") pod \"f5ea34fd-2d79-4102-bf44-99ba443fc794\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.498408 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fqvr\" (UniqueName: \"kubernetes.io/projected/f5ea34fd-2d79-4102-bf44-99ba443fc794-kube-api-access-6fqvr\") pod \"f5ea34fd-2d79-4102-bf44-99ba443fc794\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.498448 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-httpd-config\") pod \"f5ea34fd-2d79-4102-bf44-99ba443fc794\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.499014 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-config\") pod \"f5ea34fd-2d79-4102-bf44-99ba443fc794\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.499187 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-ovndb-tls-certs\") pod \"f5ea34fd-2d79-4102-bf44-99ba443fc794\" (UID: \"f5ea34fd-2d79-4102-bf44-99ba443fc794\") " Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.499711 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1e4dd31-d339-43bc-95a1-c35e7c14d933-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.506499 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "f5ea34fd-2d79-4102-bf44-99ba443fc794" (UID: "f5ea34fd-2d79-4102-bf44-99ba443fc794"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.511020 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5ea34fd-2d79-4102-bf44-99ba443fc794-kube-api-access-6fqvr" (OuterVolumeSpecName: "kube-api-access-6fqvr") pod "f5ea34fd-2d79-4102-bf44-99ba443fc794" (UID: "f5ea34fd-2d79-4102-bf44-99ba443fc794"). InnerVolumeSpecName "kube-api-access-6fqvr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.556597 4774 generic.go:334] "Generic (PLEG): container finished" podID="b1e4dd31-d339-43bc-95a1-c35e7c14d933" containerID="b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44" exitCode=0 Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.556677 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b1e4dd31-d339-43bc-95a1-c35e7c14d933","Type":"ContainerDied","Data":"b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44"} Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.556712 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b1e4dd31-d339-43bc-95a1-c35e7c14d933","Type":"ContainerDied","Data":"9358e22c75634b5446a2d2c3ff5d2673df955bb4f4b36acdda14db01ea329cce"} Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.556729 4774 scope.go:117] "RemoveContainer" containerID="9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.556918 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.577957 4774 generic.go:334] "Generic (PLEG): container finished" podID="f5ea34fd-2d79-4102-bf44-99ba443fc794" containerID="29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb" exitCode=0 Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.578030 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ffc8bc4bd-l7vz6" event={"ID":"f5ea34fd-2d79-4102-bf44-99ba443fc794","Type":"ContainerDied","Data":"29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb"} Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.578076 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5ffc8bc4bd-l7vz6" event={"ID":"f5ea34fd-2d79-4102-bf44-99ba443fc794","Type":"ContainerDied","Data":"b7188a38fa19d1ee33f224da408204f164feed1a1d39cf72e229d126d304c455"} Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.578170 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5ffc8bc4bd-l7vz6" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.604075 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fqvr\" (UniqueName: \"kubernetes.io/projected/f5ea34fd-2d79-4102-bf44-99ba443fc794-kube-api-access-6fqvr\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.604113 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-httpd-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.619003 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-config" (OuterVolumeSpecName: "config") pod "f5ea34fd-2d79-4102-bf44-99ba443fc794" (UID: "f5ea34fd-2d79-4102-bf44-99ba443fc794"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.621829 4774 scope.go:117] "RemoveContainer" containerID="b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.647924 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.650395 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5ea34fd-2d79-4102-bf44-99ba443fc794" (UID: "f5ea34fd-2d79-4102-bf44-99ba443fc794"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.666910 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.672769 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 14:25:16 crc kubenswrapper[4774]: E1121 14:25:16.673213 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c606266-0467-4aeb-85ae-10f0643e09d2" containerName="dnsmasq-dns" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.673228 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c606266-0467-4aeb-85ae-10f0643e09d2" containerName="dnsmasq-dns" Nov 21 14:25:16 crc kubenswrapper[4774]: E1121 14:25:16.673251 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1e4dd31-d339-43bc-95a1-c35e7c14d933" containerName="probe" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.673258 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1e4dd31-d339-43bc-95a1-c35e7c14d933" containerName="probe" Nov 21 14:25:16 crc kubenswrapper[4774]: E1121 14:25:16.673276 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ea34fd-2d79-4102-bf44-99ba443fc794" containerName="neutron-httpd" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.673285 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ea34fd-2d79-4102-bf44-99ba443fc794" containerName="neutron-httpd" Nov 21 14:25:16 crc kubenswrapper[4774]: E1121 14:25:16.673298 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c606266-0467-4aeb-85ae-10f0643e09d2" containerName="init" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.673304 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c606266-0467-4aeb-85ae-10f0643e09d2" containerName="init" Nov 21 14:25:16 crc kubenswrapper[4774]: E1121 14:25:16.673322 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1e4dd31-d339-43bc-95a1-c35e7c14d933" containerName="cinder-scheduler" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.673328 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1e4dd31-d339-43bc-95a1-c35e7c14d933" containerName="cinder-scheduler" Nov 21 14:25:16 crc kubenswrapper[4774]: E1121 14:25:16.673342 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5ea34fd-2d79-4102-bf44-99ba443fc794" containerName="neutron-api" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.673348 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5ea34fd-2d79-4102-bf44-99ba443fc794" containerName="neutron-api" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.673537 4774 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="b1e4dd31-d339-43bc-95a1-c35e7c14d933" containerName="probe" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.673552 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1e4dd31-d339-43bc-95a1-c35e7c14d933" containerName="cinder-scheduler" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.673561 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5ea34fd-2d79-4102-bf44-99ba443fc794" containerName="neutron-httpd" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.673573 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c606266-0467-4aeb-85ae-10f0643e09d2" containerName="dnsmasq-dns" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.673583 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5ea34fd-2d79-4102-bf44-99ba443fc794" containerName="neutron-api" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.674609 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.684404 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.709262 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.709300 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.718589 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.740987 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "f5ea34fd-2d79-4102-bf44-99ba443fc794" (UID: "f5ea34fd-2d79-4102-bf44-99ba443fc794"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.811879 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.812093 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn56k\" (UniqueName: \"kubernetes.io/projected/75187d0f-77b0-45ee-a452-1850f0fe7851-kube-api-access-mn56k\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.812132 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75187d0f-77b0-45ee-a452-1850f0fe7851-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.812244 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-scripts\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.812286 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.812406 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-config-data\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.812587 4774 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5ea34fd-2d79-4102-bf44-99ba443fc794-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.819021 4774 scope.go:117] "RemoveContainer" containerID="9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf" Nov 21 14:25:16 crc kubenswrapper[4774]: E1121 14:25:16.823671 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf\": container with ID starting with 9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf not found: ID does not exist" containerID="9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.823733 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf"} err="failed to get container 
status \"9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf\": rpc error: code = NotFound desc = could not find container \"9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf\": container with ID starting with 9d20e074a4e1a4df82589dc93569955e26a8de274d417ab80e199ac1933b7adf not found: ID does not exist" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.823771 4774 scope.go:117] "RemoveContainer" containerID="b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44" Nov 21 14:25:16 crc kubenswrapper[4774]: E1121 14:25:16.828009 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44\": container with ID starting with b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44 not found: ID does not exist" containerID="b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.828067 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44"} err="failed to get container status \"b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44\": rpc error: code = NotFound desc = could not find container \"b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44\": container with ID starting with b2d420d9001a104eb41c15894599d60aa27fc58a42cc24e7a01a0fdff1629d44 not found: ID does not exist" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.828102 4774 scope.go:117] "RemoveContainer" containerID="a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.858322 4774 scope.go:117] "RemoveContainer" containerID="29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.890118 4774 scope.go:117] "RemoveContainer" containerID="a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad" Nov 21 14:25:16 crc kubenswrapper[4774]: E1121 14:25:16.891090 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad\": container with ID starting with a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad not found: ID does not exist" containerID="a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.891147 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad"} err="failed to get container status \"a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad\": rpc error: code = NotFound desc = could not find container \"a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad\": container with ID starting with a70a2e5c1d186e0ab525b559ace69fe744d58eed3db7c78f9bee153420ecd8ad not found: ID does not exist" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.891188 4774 scope.go:117] "RemoveContainer" containerID="29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb" Nov 21 14:25:16 crc kubenswrapper[4774]: E1121 14:25:16.892542 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb\": container with ID starting with 29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb not found: ID does not exist" containerID="29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.892597 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb"} err="failed to get container status \"29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb\": rpc error: code = NotFound desc = could not find container \"29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb\": container with ID starting with 29f92a7da4b5672bdc08039e9dcbc8b86f1b4a6938f35776af7831f3ad1d35fb not found: ID does not exist" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.914678 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.914804 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75187d0f-77b0-45ee-a452-1850f0fe7851-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.914846 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn56k\" (UniqueName: \"kubernetes.io/projected/75187d0f-77b0-45ee-a452-1850f0fe7851-kube-api-access-mn56k\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.914939 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-scripts\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.914998 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.915025 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75187d0f-77b0-45ee-a452-1850f0fe7851-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.915112 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-config-data\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.922906 4774 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/neutron-5ffc8bc4bd-l7vz6"] Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.924276 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-config-data\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.924430 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-scripts\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.924947 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.931505 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5ffc8bc4bd-l7vz6"] Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.933452 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:16 crc kubenswrapper[4774]: I1121 14:25:16.945501 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn56k\" (UniqueName: \"kubernetes.io/projected/75187d0f-77b0-45ee-a452-1850f0fe7851-kube-api-access-mn56k\") pod \"cinder-scheduler-0\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " pod="openstack/cinder-scheduler-0" Nov 21 14:25:17 crc kubenswrapper[4774]: I1121 14:25:17.124488 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 21 14:25:17 crc kubenswrapper[4774]: W1121 14:25:17.679599 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75187d0f_77b0_45ee_a452_1850f0fe7851.slice/crio-b7c49052129643c386931d23bcd0eafc2f5eda9ba2208007bc2ea5f89298e0c5 WatchSource:0}: Error finding container b7c49052129643c386931d23bcd0eafc2f5eda9ba2208007bc2ea5f89298e0c5: Status 404 returned error can't find the container with id b7c49052129643c386931d23bcd0eafc2f5eda9ba2208007bc2ea5f89298e0c5 Nov 21 14:25:17 crc kubenswrapper[4774]: I1121 14:25:17.693573 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 14:25:17 crc kubenswrapper[4774]: I1121 14:25:17.826241 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.127280 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1e4dd31-d339-43bc-95a1-c35e7c14d933" path="/var/lib/kubelet/pods/b1e4dd31-d339-43bc-95a1-c35e7c14d933/volumes" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.128414 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5ea34fd-2d79-4102-bf44-99ba443fc794" path="/var/lib/kubelet/pods/f5ea34fd-2d79-4102-bf44-99ba443fc794/volumes" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.420994 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.422518 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.426946 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-jk6z2" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.427288 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.427392 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.441331 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.572782 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjm59\" (UniqueName: \"kubernetes.io/projected/429eba27-9a3e-481e-bb13-1ebb8f550431-kube-api-access-xjm59\") pod \"openstackclient\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.573280 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/429eba27-9a3e-481e-bb13-1ebb8f550431-openstack-config-secret\") pod \"openstackclient\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.573348 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/429eba27-9a3e-481e-bb13-1ebb8f550431-combined-ca-bundle\") pod \"openstackclient\" (UID: 
\"429eba27-9a3e-481e-bb13-1ebb8f550431\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.573420 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/429eba27-9a3e-481e-bb13-1ebb8f550431-openstack-config\") pod \"openstackclient\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.665323 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"75187d0f-77b0-45ee-a452-1850f0fe7851","Type":"ContainerStarted","Data":"cfabf58a3660a117c2e4bc8be1a895c8cb8999d2d918f92bec39bbee7161485e"} Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.665392 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"75187d0f-77b0-45ee-a452-1850f0fe7851","Type":"ContainerStarted","Data":"b7c49052129643c386931d23bcd0eafc2f5eda9ba2208007bc2ea5f89298e0c5"} Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.685828 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjm59\" (UniqueName: \"kubernetes.io/projected/429eba27-9a3e-481e-bb13-1ebb8f550431-kube-api-access-xjm59\") pod \"openstackclient\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.685910 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/429eba27-9a3e-481e-bb13-1ebb8f550431-openstack-config-secret\") pod \"openstackclient\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.686000 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/429eba27-9a3e-481e-bb13-1ebb8f550431-combined-ca-bundle\") pod \"openstackclient\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.686094 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/429eba27-9a3e-481e-bb13-1ebb8f550431-openstack-config\") pod \"openstackclient\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.688235 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/429eba27-9a3e-481e-bb13-1ebb8f550431-openstack-config\") pod \"openstackclient\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: E1121 14:25:18.691260 4774 projected.go:194] Error preparing data for projected volume kube-api-access-xjm59 for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: User "system:node:crc" cannot create resource "serviceaccounts/token" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Nov 21 14:25:18 crc kubenswrapper[4774]: E1121 14:25:18.691374 4774 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/429eba27-9a3e-481e-bb13-1ebb8f550431-kube-api-access-xjm59 podName:429eba27-9a3e-481e-bb13-1ebb8f550431 nodeName:}" failed. No retries permitted until 2025-11-21 14:25:19.191346431 +0000 UTC m=+1309.843545690 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-xjm59" (UniqueName: "kubernetes.io/projected/429eba27-9a3e-481e-bb13-1ebb8f550431-kube-api-access-xjm59") pod "openstackclient" (UID: "429eba27-9a3e-481e-bb13-1ebb8f550431") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: User "system:node:crc" cannot create resource "serviceaccounts/token" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.695783 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/429eba27-9a3e-481e-bb13-1ebb8f550431-openstack-config-secret\") pod \"openstackclient\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.714959 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Nov 21 14:25:18 crc kubenswrapper[4774]: E1121 14:25:18.715881 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle kube-api-access-xjm59], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/openstackclient" podUID="429eba27-9a3e-481e-bb13-1ebb8f550431" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.744398 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/429eba27-9a3e-481e-bb13-1ebb8f550431-combined-ca-bundle\") pod \"openstackclient\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.763058 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.791522 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.792996 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.814347 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.843713 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.897526 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zmc9\" (UniqueName: \"kubernetes.io/projected/66b2a565-b48d-4b6f-8527-27326c13b522-kube-api-access-6zmc9\") pod \"openstackclient\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.897578 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b2a565-b48d-4b6f-8527-27326c13b522-combined-ca-bundle\") pod \"openstackclient\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.897611 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/66b2a565-b48d-4b6f-8527-27326c13b522-openstack-config-secret\") pod \"openstackclient\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.897695 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/66b2a565-b48d-4b6f-8527-27326c13b522-openstack-config\") pod \"openstackclient\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.947267 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-f4b4b6596-tc7n9"] Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.947567 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-f4b4b6596-tc7n9" podUID="13e01a2d-c480-4b53-849e-d9bd1ce28d15" containerName="barbican-api-log" containerID="cri-o://3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f" gracePeriod=30 Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.947730 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-f4b4b6596-tc7n9" podUID="13e01a2d-c480-4b53-849e-d9bd1ce28d15" containerName="barbican-api" containerID="cri-o://774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48" gracePeriod=30 Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.999161 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zmc9\" (UniqueName: \"kubernetes.io/projected/66b2a565-b48d-4b6f-8527-27326c13b522-kube-api-access-6zmc9\") pod \"openstackclient\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.999220 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b2a565-b48d-4b6f-8527-27326c13b522-combined-ca-bundle\") pod \"openstackclient\" (UID: 
\"66b2a565-b48d-4b6f-8527-27326c13b522\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.999258 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/66b2a565-b48d-4b6f-8527-27326c13b522-openstack-config-secret\") pod \"openstackclient\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " pod="openstack/openstackclient" Nov 21 14:25:18 crc kubenswrapper[4774]: I1121 14:25:18.999308 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/66b2a565-b48d-4b6f-8527-27326c13b522-openstack-config\") pod \"openstackclient\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " pod="openstack/openstackclient" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.000190 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/66b2a565-b48d-4b6f-8527-27326c13b522-openstack-config\") pod \"openstackclient\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " pod="openstack/openstackclient" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.017705 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b2a565-b48d-4b6f-8527-27326c13b522-combined-ca-bundle\") pod \"openstackclient\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " pod="openstack/openstackclient" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.038522 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/66b2a565-b48d-4b6f-8527-27326c13b522-openstack-config-secret\") pod \"openstackclient\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " pod="openstack/openstackclient" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.053687 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zmc9\" (UniqueName: \"kubernetes.io/projected/66b2a565-b48d-4b6f-8527-27326c13b522-kube-api-access-6zmc9\") pod \"openstackclient\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " pod="openstack/openstackclient" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.158110 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.202179 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjm59\" (UniqueName: \"kubernetes.io/projected/429eba27-9a3e-481e-bb13-1ebb8f550431-kube-api-access-xjm59\") pod \"openstackclient\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " pod="openstack/openstackclient" Nov 21 14:25:19 crc kubenswrapper[4774]: E1121 14:25:19.205859 4774 projected.go:194] Error preparing data for projected volume kube-api-access-xjm59 for pod openstack/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (429eba27-9a3e-481e-bb13-1ebb8f550431) does not match the UID in record. The object might have been deleted and then recreated Nov 21 14:25:19 crc kubenswrapper[4774]: E1121 14:25:19.205944 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/429eba27-9a3e-481e-bb13-1ebb8f550431-kube-api-access-xjm59 podName:429eba27-9a3e-481e-bb13-1ebb8f550431 nodeName:}" failed. 
No retries permitted until 2025-11-21 14:25:20.20592089 +0000 UTC m=+1310.858120149 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-xjm59" (UniqueName: "kubernetes.io/projected/429eba27-9a3e-481e-bb13-1ebb8f550431-kube-api-access-xjm59") pod "openstackclient" (UID: "429eba27-9a3e-481e-bb13-1ebb8f550431") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (429eba27-9a3e-481e-bb13-1ebb8f550431) does not match the UID in record. The object might have been deleted and then recreated Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.677965 4774 generic.go:334] "Generic (PLEG): container finished" podID="13e01a2d-c480-4b53-849e-d9bd1ce28d15" containerID="3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f" exitCode=143 Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.678048 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f4b4b6596-tc7n9" event={"ID":"13e01a2d-c480-4b53-849e-d9bd1ce28d15","Type":"ContainerDied","Data":"3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f"} Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.681242 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.681247 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"75187d0f-77b0-45ee-a452-1850f0fe7851","Type":"ContainerStarted","Data":"f284772158aa9afb2ac683ea5db800eb76ca8ee198f42ac67c00afb5d059483f"} Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.685989 4774 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="429eba27-9a3e-481e-bb13-1ebb8f550431" podUID="66b2a565-b48d-4b6f-8527-27326c13b522" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.692242 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.718129 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.718096189 podStartE2EDuration="3.718096189s" podCreationTimestamp="2025-11-21 14:25:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:25:19.7025633 +0000 UTC m=+1310.354762559" watchObservedRunningTime="2025-11-21 14:25:19.718096189 +0000 UTC m=+1310.370295448" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.751595 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.816577 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/429eba27-9a3e-481e-bb13-1ebb8f550431-combined-ca-bundle\") pod \"429eba27-9a3e-481e-bb13-1ebb8f550431\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.816703 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/429eba27-9a3e-481e-bb13-1ebb8f550431-openstack-config\") pod \"429eba27-9a3e-481e-bb13-1ebb8f550431\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.816778 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/429eba27-9a3e-481e-bb13-1ebb8f550431-openstack-config-secret\") pod \"429eba27-9a3e-481e-bb13-1ebb8f550431\" (UID: \"429eba27-9a3e-481e-bb13-1ebb8f550431\") " Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.817237 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjm59\" (UniqueName: \"kubernetes.io/projected/429eba27-9a3e-481e-bb13-1ebb8f550431-kube-api-access-xjm59\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.820773 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/429eba27-9a3e-481e-bb13-1ebb8f550431-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "429eba27-9a3e-481e-bb13-1ebb8f550431" (UID: "429eba27-9a3e-481e-bb13-1ebb8f550431"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.827789 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/429eba27-9a3e-481e-bb13-1ebb8f550431-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "429eba27-9a3e-481e-bb13-1ebb8f550431" (UID: "429eba27-9a3e-481e-bb13-1ebb8f550431"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.845480 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/429eba27-9a3e-481e-bb13-1ebb8f550431-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "429eba27-9a3e-481e-bb13-1ebb8f550431" (UID: "429eba27-9a3e-481e-bb13-1ebb8f550431"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.919731 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/429eba27-9a3e-481e-bb13-1ebb8f550431-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.919977 4774 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/429eba27-9a3e-481e-bb13-1ebb8f550431-openstack-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:19 crc kubenswrapper[4774]: I1121 14:25:19.920058 4774 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/429eba27-9a3e-481e-bb13-1ebb8f550431-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:20 crc kubenswrapper[4774]: I1121 14:25:20.107561 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="429eba27-9a3e-481e-bb13-1ebb8f550431" path="/var/lib/kubelet/pods/429eba27-9a3e-481e-bb13-1ebb8f550431/volumes" Nov 21 14:25:20 crc kubenswrapper[4774]: I1121 14:25:20.613479 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:20 crc kubenswrapper[4774]: I1121 14:25:20.614038 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="ceilometer-central-agent" containerID="cri-o://fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21" gracePeriod=30 Nov 21 14:25:20 crc kubenswrapper[4774]: I1121 14:25:20.614274 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="proxy-httpd" containerID="cri-o://c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2" gracePeriod=30 Nov 21 14:25:20 crc kubenswrapper[4774]: I1121 14:25:20.614347 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="sg-core" containerID="cri-o://aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397" gracePeriod=30 Nov 21 14:25:20 crc kubenswrapper[4774]: I1121 14:25:20.614415 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="ceilometer-notification-agent" containerID="cri-o://6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a" gracePeriod=30 Nov 21 14:25:20 crc kubenswrapper[4774]: I1121 14:25:20.622577 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 21 14:25:20 crc kubenswrapper[4774]: I1121 14:25:20.712875 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"66b2a565-b48d-4b6f-8527-27326c13b522","Type":"ContainerStarted","Data":"3c433dc661fde874ed7f26ecabf1f973458062d2d498017a76a0f6cc89b919e5"} Nov 21 14:25:20 crc kubenswrapper[4774]: I1121 14:25:20.713245 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 21 14:25:20 crc kubenswrapper[4774]: I1121 14:25:20.729756 4774 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="429eba27-9a3e-481e-bb13-1ebb8f550431" podUID="66b2a565-b48d-4b6f-8527-27326c13b522" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.404075 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-748c4cc85c-dkrhb"] Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.406594 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.415680 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.422050 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.422382 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.425523 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-748c4cc85c-dkrhb"] Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.470074 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-combined-ca-bundle\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.470156 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-config-data\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.470223 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-public-tls-certs\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.470254 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-run-httpd\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.470631 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-internal-tls-certs\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.470803 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-swift\" (UniqueName: \"kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.470865 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-log-httpd\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.470898 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlm8b\" (UniqueName: \"kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-kube-api-access-nlm8b\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.573211 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.573428 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-log-httpd\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.573481 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlm8b\" (UniqueName: \"kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-kube-api-access-nlm8b\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.573692 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-combined-ca-bundle\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.573791 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-config-data\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.573948 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-public-tls-certs\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.573997 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-run-httpd\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.574107 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-log-httpd\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.574211 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-internal-tls-certs\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.574781 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-run-httpd\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.582022 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-config-data\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.582772 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-internal-tls-certs\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.582935 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-public-tls-certs\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.583490 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.589061 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-combined-ca-bundle\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.600664 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlm8b\" (UniqueName: \"kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-kube-api-access-nlm8b\") pod \"swift-proxy-748c4cc85c-dkrhb\" (UID: 
\"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.731060 4774 generic.go:334] "Generic (PLEG): container finished" podID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerID="c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2" exitCode=0 Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.731377 4774 generic.go:334] "Generic (PLEG): container finished" podID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerID="aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397" exitCode=2 Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.731144 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f574865-7b2e-40c0-8e65-a554ca66b76d","Type":"ContainerDied","Data":"c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2"} Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.731435 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f574865-7b2e-40c0-8e65-a554ca66b76d","Type":"ContainerDied","Data":"aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397"} Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.731449 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f574865-7b2e-40c0-8e65-a554ca66b76d","Type":"ContainerDied","Data":"fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21"} Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.731387 4774 generic.go:334] "Generic (PLEG): container finished" podID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerID="fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21" exitCode=0 Nov 21 14:25:21 crc kubenswrapper[4774]: I1121 14:25:21.732769 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.125051 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.394590 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-748c4cc85c-dkrhb"] Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.414376 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.494687 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-scripts\") pod \"7f574865-7b2e-40c0-8e65-a554ca66b76d\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.495261 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f574865-7b2e-40c0-8e65-a554ca66b76d-log-httpd\") pod \"7f574865-7b2e-40c0-8e65-a554ca66b76d\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.495309 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-config-data\") pod \"7f574865-7b2e-40c0-8e65-a554ca66b76d\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.495416 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f574865-7b2e-40c0-8e65-a554ca66b76d-run-httpd\") pod \"7f574865-7b2e-40c0-8e65-a554ca66b76d\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.495471 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqjzh\" (UniqueName: \"kubernetes.io/projected/7f574865-7b2e-40c0-8e65-a554ca66b76d-kube-api-access-rqjzh\") pod \"7f574865-7b2e-40c0-8e65-a554ca66b76d\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.495507 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-sg-core-conf-yaml\") pod \"7f574865-7b2e-40c0-8e65-a554ca66b76d\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.495615 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-combined-ca-bundle\") pod \"7f574865-7b2e-40c0-8e65-a554ca66b76d\" (UID: \"7f574865-7b2e-40c0-8e65-a554ca66b76d\") " Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.500110 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-scripts" (OuterVolumeSpecName: "scripts") pod "7f574865-7b2e-40c0-8e65-a554ca66b76d" (UID: "7f574865-7b2e-40c0-8e65-a554ca66b76d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.507911 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f574865-7b2e-40c0-8e65-a554ca66b76d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7f574865-7b2e-40c0-8e65-a554ca66b76d" (UID: "7f574865-7b2e-40c0-8e65-a554ca66b76d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.512644 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f574865-7b2e-40c0-8e65-a554ca66b76d-kube-api-access-rqjzh" (OuterVolumeSpecName: "kube-api-access-rqjzh") pod "7f574865-7b2e-40c0-8e65-a554ca66b76d" (UID: "7f574865-7b2e-40c0-8e65-a554ca66b76d"). InnerVolumeSpecName "kube-api-access-rqjzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.522119 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f574865-7b2e-40c0-8e65-a554ca66b76d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7f574865-7b2e-40c0-8e65-a554ca66b76d" (UID: "7f574865-7b2e-40c0-8e65-a554ca66b76d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.535574 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7f574865-7b2e-40c0-8e65-a554ca66b76d" (UID: "7f574865-7b2e-40c0-8e65-a554ca66b76d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.600724 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.600763 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f574865-7b2e-40c0-8e65-a554ca66b76d-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.600774 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7f574865-7b2e-40c0-8e65-a554ca66b76d-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.600784 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqjzh\" (UniqueName: \"kubernetes.io/projected/7f574865-7b2e-40c0-8e65-a554ca66b76d-kube-api-access-rqjzh\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.600796 4774 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.612017 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.615368 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f574865-7b2e-40c0-8e65-a554ca66b76d" (UID: "7f574865-7b2e-40c0-8e65-a554ca66b76d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.666164 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-config-data" (OuterVolumeSpecName: "config-data") pod "7f574865-7b2e-40c0-8e65-a554ca66b76d" (UID: "7f574865-7b2e-40c0-8e65-a554ca66b76d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.701931 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6nnj\" (UniqueName: \"kubernetes.io/projected/13e01a2d-c480-4b53-849e-d9bd1ce28d15-kube-api-access-b6nnj\") pod \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.701989 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13e01a2d-c480-4b53-849e-d9bd1ce28d15-logs\") pod \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.702159 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-config-data-custom\") pod \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.702199 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-config-data\") pod \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.702362 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-combined-ca-bundle\") pod \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\" (UID: \"13e01a2d-c480-4b53-849e-d9bd1ce28d15\") " Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.702803 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.702845 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f574865-7b2e-40c0-8e65-a554ca66b76d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.703082 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13e01a2d-c480-4b53-849e-d9bd1ce28d15-logs" (OuterVolumeSpecName: "logs") pod "13e01a2d-c480-4b53-849e-d9bd1ce28d15" (UID: "13e01a2d-c480-4b53-849e-d9bd1ce28d15"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.708653 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13e01a2d-c480-4b53-849e-d9bd1ce28d15-kube-api-access-b6nnj" (OuterVolumeSpecName: "kube-api-access-b6nnj") pod "13e01a2d-c480-4b53-849e-d9bd1ce28d15" (UID: "13e01a2d-c480-4b53-849e-d9bd1ce28d15"). 
InnerVolumeSpecName "kube-api-access-b6nnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.709116 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "13e01a2d-c480-4b53-849e-d9bd1ce28d15" (UID: "13e01a2d-c480-4b53-849e-d9bd1ce28d15"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.752617 4774 generic.go:334] "Generic (PLEG): container finished" podID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerID="6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a" exitCode=0 Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.752692 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f574865-7b2e-40c0-8e65-a554ca66b76d","Type":"ContainerDied","Data":"6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a"} Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.752920 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7f574865-7b2e-40c0-8e65-a554ca66b76d","Type":"ContainerDied","Data":"42c746973c1c44c63502c7a509116387c1c6c6a6c0b8d19c46fa5191ca7403d3"} Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.752951 4774 scope.go:117] "RemoveContainer" containerID="c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.753568 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.764225 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13e01a2d-c480-4b53-849e-d9bd1ce28d15" (UID: "13e01a2d-c480-4b53-849e-d9bd1ce28d15"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.767493 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-748c4cc85c-dkrhb" event={"ID":"4dd9e6d7-d0b1-49f3-920a-34e434835bfa","Type":"ContainerStarted","Data":"07fdc4d92fd456dd661517ac6b3f8e13a9e0883a4bda9fd068793831cd9ace5e"} Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.771159 4774 generic.go:334] "Generic (PLEG): container finished" podID="13e01a2d-c480-4b53-849e-d9bd1ce28d15" containerID="774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48" exitCode=0 Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.771220 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f4b4b6596-tc7n9" event={"ID":"13e01a2d-c480-4b53-849e-d9bd1ce28d15","Type":"ContainerDied","Data":"774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48"} Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.771269 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-f4b4b6596-tc7n9" event={"ID":"13e01a2d-c480-4b53-849e-d9bd1ce28d15","Type":"ContainerDied","Data":"348a1cbf3a2dcfb003437ceb8d2bb1c441ac16d1e777f1b97e722e6c4d65a46f"} Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.771775 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-f4b4b6596-tc7n9" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.789287 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-config-data" (OuterVolumeSpecName: "config-data") pod "13e01a2d-c480-4b53-849e-d9bd1ce28d15" (UID: "13e01a2d-c480-4b53-849e-d9bd1ce28d15"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.804484 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.804523 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.804536 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13e01a2d-c480-4b53-849e-d9bd1ce28d15-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.804551 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6nnj\" (UniqueName: \"kubernetes.io/projected/13e01a2d-c480-4b53-849e-d9bd1ce28d15-kube-api-access-b6nnj\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.804563 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/13e01a2d-c480-4b53-849e-d9bd1ce28d15-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.807251 4774 scope.go:117] "RemoveContainer" containerID="aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.833576 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.847354 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.889042 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:22 crc kubenswrapper[4774]: E1121 14:25:22.892120 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="proxy-httpd" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.892149 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="proxy-httpd" Nov 21 14:25:22 crc kubenswrapper[4774]: E1121 14:25:22.892206 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="ceilometer-central-agent" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.892213 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="ceilometer-central-agent" Nov 21 14:25:22 crc kubenswrapper[4774]: E1121 14:25:22.892237 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="sg-core" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.892245 4774 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="sg-core" Nov 21 14:25:22 crc kubenswrapper[4774]: E1121 14:25:22.892291 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13e01a2d-c480-4b53-849e-d9bd1ce28d15" containerName="barbican-api" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.892299 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="13e01a2d-c480-4b53-849e-d9bd1ce28d15" containerName="barbican-api" Nov 21 14:25:22 crc kubenswrapper[4774]: E1121 14:25:22.892319 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="ceilometer-notification-agent" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.892328 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="ceilometer-notification-agent" Nov 21 14:25:22 crc kubenswrapper[4774]: E1121 14:25:22.892355 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13e01a2d-c480-4b53-849e-d9bd1ce28d15" containerName="barbican-api-log" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.892362 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="13e01a2d-c480-4b53-849e-d9bd1ce28d15" containerName="barbican-api-log" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.895131 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="proxy-httpd" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.895150 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="13e01a2d-c480-4b53-849e-d9bd1ce28d15" containerName="barbican-api" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.895175 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="13e01a2d-c480-4b53-849e-d9bd1ce28d15" containerName="barbican-api-log" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.895195 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="ceilometer-notification-agent" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.895208 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="ceilometer-central-agent" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.895231 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" containerName="sg-core" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.918683 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.926852 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.929999 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 21 14:25:22 crc kubenswrapper[4774]: I1121 14:25:22.930935 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.010482 4774 scope.go:117] "RemoveContainer" containerID="6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.013340 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.013427 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-scripts\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.013461 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmw9j\" (UniqueName: \"kubernetes.io/projected/211865db-84d2-4778-b18b-49d80a63b332-kube-api-access-mmw9j\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.013800 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/211865db-84d2-4778-b18b-49d80a63b332-log-httpd\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.013905 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/211865db-84d2-4778-b18b-49d80a63b332-run-httpd\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.013985 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-config-data\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.014033 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.047268 4774 scope.go:117] "RemoveContainer" containerID="fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 
14:25:23.096895 4774 scope.go:117] "RemoveContainer" containerID="c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2" Nov 21 14:25:23 crc kubenswrapper[4774]: E1121 14:25:23.109285 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2\": container with ID starting with c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2 not found: ID does not exist" containerID="c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.109354 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2"} err="failed to get container status \"c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2\": rpc error: code = NotFound desc = could not find container \"c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2\": container with ID starting with c0e0a723864d395a841409da314f9d5ac1dfb9c47d8436561655708da26db1a2 not found: ID does not exist" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.109400 4774 scope.go:117] "RemoveContainer" containerID="aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397" Nov 21 14:25:23 crc kubenswrapper[4774]: E1121 14:25:23.110912 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397\": container with ID starting with aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397 not found: ID does not exist" containerID="aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.110981 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397"} err="failed to get container status \"aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397\": rpc error: code = NotFound desc = could not find container \"aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397\": container with ID starting with aa6e00116a3d76dc3fb54082b3565dadbda71ecab6f8785185dd948731b3c397 not found: ID does not exist" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.111031 4774 scope.go:117] "RemoveContainer" containerID="6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a" Nov 21 14:25:23 crc kubenswrapper[4774]: E1121 14:25:23.111644 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a\": container with ID starting with 6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a not found: ID does not exist" containerID="6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.111673 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a"} err="failed to get container status \"6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a\": rpc error: code = NotFound desc = could not find container \"6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a\": container with ID 
starting with 6fead203fc98a0f9e931bbdbb1d91114077e077e7b6482b84496e7c4adf7c67a not found: ID does not exist" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.111691 4774 scope.go:117] "RemoveContainer" containerID="fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21" Nov 21 14:25:23 crc kubenswrapper[4774]: E1121 14:25:23.112136 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21\": container with ID starting with fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21 not found: ID does not exist" containerID="fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.112171 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21"} err="failed to get container status \"fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21\": rpc error: code = NotFound desc = could not find container \"fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21\": container with ID starting with fbaff476de43230bcab994d8454bd96d3dfa16e396de2b069bbc72301e790e21 not found: ID does not exist" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.112190 4774 scope.go:117] "RemoveContainer" containerID="774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.117128 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/211865db-84d2-4778-b18b-49d80a63b332-log-httpd\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.117191 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/211865db-84d2-4778-b18b-49d80a63b332-run-httpd\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.117275 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-config-data\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.117324 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.117492 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.117685 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-scripts\") pod \"ceilometer-0\" 
(UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.117700 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/211865db-84d2-4778-b18b-49d80a63b332-run-httpd\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.117759 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmw9j\" (UniqueName: \"kubernetes.io/projected/211865db-84d2-4778-b18b-49d80a63b332-kube-api-access-mmw9j\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.117612 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/211865db-84d2-4778-b18b-49d80a63b332-log-httpd\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.125143 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.125466 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-config-data\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.128400 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-f4b4b6596-tc7n9"] Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.131001 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.138889 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmw9j\" (UniqueName: \"kubernetes.io/projected/211865db-84d2-4778-b18b-49d80a63b332-kube-api-access-mmw9j\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.143387 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-f4b4b6596-tc7n9"] Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.156431 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-scripts\") pod \"ceilometer-0\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.226995 4774 scope.go:117] "RemoveContainer" containerID="3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.257014 4774 scope.go:117] "RemoveContainer" containerID="774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48" 
Nov 21 14:25:23 crc kubenswrapper[4774]: E1121 14:25:23.262262 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48\": container with ID starting with 774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48 not found: ID does not exist" containerID="774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.262342 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48"} err="failed to get container status \"774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48\": rpc error: code = NotFound desc = could not find container \"774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48\": container with ID starting with 774978f8bb053de1f226a07f7f0583f65a74c4b9597153634337b03972e1ea48 not found: ID does not exist" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.262392 4774 scope.go:117] "RemoveContainer" containerID="3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f" Nov 21 14:25:23 crc kubenswrapper[4774]: E1121 14:25:23.262899 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f\": container with ID starting with 3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f not found: ID does not exist" containerID="3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.262944 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f"} err="failed to get container status \"3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f\": rpc error: code = NotFound desc = could not find container \"3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f\": container with ID starting with 3f4423e1839b0ba923849684229568c5619f1ad3bde9c00135c492bf4b4b1b1f not found: ID does not exist" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.334906 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.788066 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-748c4cc85c-dkrhb" event={"ID":"4dd9e6d7-d0b1-49f3-920a-34e434835bfa","Type":"ContainerStarted","Data":"8dd78826a36d2a0c84007fefea4af9b39c62687e997e7371678a93eeba3ce1aa"} Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.788533 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-748c4cc85c-dkrhb" event={"ID":"4dd9e6d7-d0b1-49f3-920a-34e434835bfa","Type":"ContainerStarted","Data":"969fbf4f7d7b2be68e80f13bc613bae47954c1d9cf0870455b84d0a4bc6e18ef"} Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.790238 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.790283 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.824955 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-748c4cc85c-dkrhb" podStartSLOduration=2.824920344 podStartE2EDuration="2.824920344s" podCreationTimestamp="2025-11-21 14:25:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:25:23.816979895 +0000 UTC m=+1314.469179174" watchObservedRunningTime="2025-11-21 14:25:23.824920344 +0000 UTC m=+1314.477119603" Nov 21 14:25:23 crc kubenswrapper[4774]: I1121 14:25:23.956463 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:23 crc kubenswrapper[4774]: W1121 14:25:23.968068 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod211865db_84d2_4778_b18b_49d80a63b332.slice/crio-a444409c2b29a60ca404c742d606d7100b6c3794a9ca35533efb74ca4943e97e WatchSource:0}: Error finding container a444409c2b29a60ca404c742d606d7100b6c3794a9ca35533efb74ca4943e97e: Status 404 returned error can't find the container with id a444409c2b29a60ca404c742d606d7100b6c3794a9ca35533efb74ca4943e97e Nov 21 14:25:24 crc kubenswrapper[4774]: I1121 14:25:24.105114 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13e01a2d-c480-4b53-849e-d9bd1ce28d15" path="/var/lib/kubelet/pods/13e01a2d-c480-4b53-849e-d9bd1ce28d15/volumes" Nov 21 14:25:24 crc kubenswrapper[4774]: I1121 14:25:24.105808 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f574865-7b2e-40c0-8e65-a554ca66b76d" path="/var/lib/kubelet/pods/7f574865-7b2e-40c0-8e65-a554ca66b76d/volumes" Nov 21 14:25:24 crc kubenswrapper[4774]: I1121 14:25:24.815954 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"211865db-84d2-4778-b18b-49d80a63b332","Type":"ContainerStarted","Data":"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337"} Nov 21 14:25:24 crc kubenswrapper[4774]: I1121 14:25:24.816348 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"211865db-84d2-4778-b18b-49d80a63b332","Type":"ContainerStarted","Data":"a444409c2b29a60ca404c742d606d7100b6c3794a9ca35533efb74ca4943e97e"} Nov 21 14:25:25 crc kubenswrapper[4774]: I1121 14:25:25.828329 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"211865db-84d2-4778-b18b-49d80a63b332","Type":"ContainerStarted","Data":"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38"} Nov 21 14:25:27 crc kubenswrapper[4774]: I1121 14:25:27.413433 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 21 14:25:29 crc kubenswrapper[4774]: I1121 14:25:29.600812 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:25:29 crc kubenswrapper[4774]: I1121 14:25:29.601275 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:25:29 crc kubenswrapper[4774]: I1121 14:25:29.635580 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:31 crc kubenswrapper[4774]: I1121 14:25:31.744031 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:31 crc kubenswrapper[4774]: I1121 14:25:31.744839 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:25:32 crc kubenswrapper[4774]: I1121 14:25:32.917319 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"66b2a565-b48d-4b6f-8527-27326c13b522","Type":"ContainerStarted","Data":"dfbedbce9f436ba2d67af7321bdcc2d58c19cd6c6808e9ddda88da0a09afd9af"} Nov 21 14:25:32 crc kubenswrapper[4774]: I1121 14:25:32.921790 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"211865db-84d2-4778-b18b-49d80a63b332","Type":"ContainerStarted","Data":"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5"} Nov 21 14:25:32 crc kubenswrapper[4774]: I1121 14:25:32.940367 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.070848654 podStartE2EDuration="14.940334751s" podCreationTimestamp="2025-11-21 14:25:18 +0000 UTC" firstStartedPulling="2025-11-21 14:25:19.763933965 +0000 UTC m=+1310.416133244" lastFinishedPulling="2025-11-21 14:25:31.633420082 +0000 UTC m=+1322.285619341" observedRunningTime="2025-11-21 14:25:32.935398659 +0000 UTC m=+1323.587597918" watchObservedRunningTime="2025-11-21 14:25:32.940334751 +0000 UTC m=+1323.592534010" Nov 21 14:25:33 crc kubenswrapper[4774]: I1121 14:25:33.952961 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"211865db-84d2-4778-b18b-49d80a63b332","Type":"ContainerStarted","Data":"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5"} Nov 21 14:25:33 crc kubenswrapper[4774]: I1121 14:25:33.953209 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="ceilometer-central-agent" containerID="cri-o://e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337" gracePeriod=30 Nov 21 14:25:33 crc kubenswrapper[4774]: I1121 14:25:33.953493 4774 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 21 14:25:33 crc kubenswrapper[4774]: I1121 14:25:33.953616 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="proxy-httpd" containerID="cri-o://e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5" gracePeriod=30 Nov 21 14:25:33 crc kubenswrapper[4774]: I1121 14:25:33.953692 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="sg-core" containerID="cri-o://acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5" gracePeriod=30 Nov 21 14:25:33 crc kubenswrapper[4774]: I1121 14:25:33.953740 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="ceilometer-notification-agent" containerID="cri-o://963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38" gracePeriod=30 Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.023032 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.718156412 podStartE2EDuration="12.023005417s" podCreationTimestamp="2025-11-21 14:25:22 +0000 UTC" firstStartedPulling="2025-11-21 14:25:23.971216354 +0000 UTC m=+1314.623415613" lastFinishedPulling="2025-11-21 14:25:33.276065369 +0000 UTC m=+1323.928264618" observedRunningTime="2025-11-21 14:25:34.000455045 +0000 UTC m=+1324.652654324" watchObservedRunningTime="2025-11-21 14:25:34.023005417 +0000 UTC m=+1324.675204676" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.831001 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.873703 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928266 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-config-data\") pod \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928326 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmw9j\" (UniqueName: \"kubernetes.io/projected/211865db-84d2-4778-b18b-49d80a63b332-kube-api-access-mmw9j\") pod \"211865db-84d2-4778-b18b-49d80a63b332\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928423 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/211865db-84d2-4778-b18b-49d80a63b332-log-httpd\") pod \"211865db-84d2-4778-b18b-49d80a63b332\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928454 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-combined-ca-bundle\") pod \"211865db-84d2-4778-b18b-49d80a63b332\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928490 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-logs\") pod \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928519 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-sg-core-conf-yaml\") pod \"211865db-84d2-4778-b18b-49d80a63b332\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928548 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-config-data-custom\") pod \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928575 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jh2hd\" (UniqueName: \"kubernetes.io/projected/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-kube-api-access-jh2hd\") pod \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928601 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-combined-ca-bundle\") pod \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928638 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-config-data\") pod \"211865db-84d2-4778-b18b-49d80a63b332\" (UID: 
\"211865db-84d2-4778-b18b-49d80a63b332\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928657 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-scripts\") pod \"211865db-84d2-4778-b18b-49d80a63b332\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928712 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/211865db-84d2-4778-b18b-49d80a63b332-run-httpd\") pod \"211865db-84d2-4778-b18b-49d80a63b332\" (UID: \"211865db-84d2-4778-b18b-49d80a63b332\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928784 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-scripts\") pod \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.928865 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-etc-machine-id\") pod \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\" (UID: \"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2\") " Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.929148 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-logs" (OuterVolumeSpecName: "logs") pod "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" (UID: "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.929252 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" (UID: "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.929650 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.929679 4774 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.930156 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/211865db-84d2-4778-b18b-49d80a63b332-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "211865db-84d2-4778-b18b-49d80a63b332" (UID: "211865db-84d2-4778-b18b-49d80a63b332"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.931165 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/211865db-84d2-4778-b18b-49d80a63b332-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "211865db-84d2-4778-b18b-49d80a63b332" (UID: "211865db-84d2-4778-b18b-49d80a63b332"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.935575 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-scripts" (OuterVolumeSpecName: "scripts") pod "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" (UID: "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.937083 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/211865db-84d2-4778-b18b-49d80a63b332-kube-api-access-mmw9j" (OuterVolumeSpecName: "kube-api-access-mmw9j") pod "211865db-84d2-4778-b18b-49d80a63b332" (UID: "211865db-84d2-4778-b18b-49d80a63b332"). InnerVolumeSpecName "kube-api-access-mmw9j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.937252 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-scripts" (OuterVolumeSpecName: "scripts") pod "211865db-84d2-4778-b18b-49d80a63b332" (UID: "211865db-84d2-4778-b18b-49d80a63b332"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.938100 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-kube-api-access-jh2hd" (OuterVolumeSpecName: "kube-api-access-jh2hd") pod "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" (UID: "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2"). InnerVolumeSpecName "kube-api-access-jh2hd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.938163 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" (UID: "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.961959 4774 generic.go:334] "Generic (PLEG): container finished" podID="211865db-84d2-4778-b18b-49d80a63b332" containerID="e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5" exitCode=0 Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.961999 4774 generic.go:334] "Generic (PLEG): container finished" podID="211865db-84d2-4778-b18b-49d80a63b332" containerID="acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5" exitCode=2 Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.962011 4774 generic.go:334] "Generic (PLEG): container finished" podID="211865db-84d2-4778-b18b-49d80a63b332" containerID="963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38" exitCode=0 Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.962019 4774 generic.go:334] "Generic (PLEG): container finished" podID="211865db-84d2-4778-b18b-49d80a63b332" containerID="e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337" exitCode=0 Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.962084 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.962316 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"211865db-84d2-4778-b18b-49d80a63b332","Type":"ContainerDied","Data":"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5"} Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.962382 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"211865db-84d2-4778-b18b-49d80a63b332","Type":"ContainerDied","Data":"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5"} Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.962396 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"211865db-84d2-4778-b18b-49d80a63b332","Type":"ContainerDied","Data":"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38"} Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.962407 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"211865db-84d2-4778-b18b-49d80a63b332","Type":"ContainerDied","Data":"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337"} Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.962418 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"211865db-84d2-4778-b18b-49d80a63b332","Type":"ContainerDied","Data":"a444409c2b29a60ca404c742d606d7100b6c3794a9ca35533efb74ca4943e97e"} Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.962430 4774 scope.go:117] "RemoveContainer" containerID="e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.967650 4774 generic.go:334] "Generic (PLEG): container finished" podID="c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" containerID="a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0" exitCode=137 Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.967691 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2","Type":"ContainerDied","Data":"a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0"} Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.967733 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c0c38a76-c3f1-414c-a1bf-d24a37e29ea2","Type":"ContainerDied","Data":"f5abe4b6e11635212e5f7ae68eadb46313737d58937e7af55ba15dbc727e7379"} Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.967750 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.979244 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" (UID: "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:34 crc kubenswrapper[4774]: I1121 14:25:34.990138 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "211865db-84d2-4778-b18b-49d80a63b332" (UID: "211865db-84d2-4778-b18b-49d80a63b332"). 
InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.005183 4774 scope.go:117] "RemoveContainer" containerID="acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.012802 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-config-data" (OuterVolumeSpecName: "config-data") pod "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" (UID: "c0c38a76-c3f1-414c-a1bf-d24a37e29ea2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.031260 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.031297 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jh2hd\" (UniqueName: \"kubernetes.io/projected/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-kube-api-access-jh2hd\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.031311 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.031322 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.031332 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/211865db-84d2-4778-b18b-49d80a63b332-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.031344 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.031358 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.031371 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmw9j\" (UniqueName: \"kubernetes.io/projected/211865db-84d2-4778-b18b-49d80a63b332-kube-api-access-mmw9j\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.031382 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/211865db-84d2-4778-b18b-49d80a63b332-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.031397 4774 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.032865 4774 scope.go:117] "RemoveContainer" containerID="963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38" Nov 21 14:25:35 
crc kubenswrapper[4774]: I1121 14:25:35.052718 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "211865db-84d2-4778-b18b-49d80a63b332" (UID: "211865db-84d2-4778-b18b-49d80a63b332"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.061714 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-config-data" (OuterVolumeSpecName: "config-data") pod "211865db-84d2-4778-b18b-49d80a63b332" (UID: "211865db-84d2-4778-b18b-49d80a63b332"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.062900 4774 scope.go:117] "RemoveContainer" containerID="e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.084036 4774 scope.go:117] "RemoveContainer" containerID="e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5" Nov 21 14:25:35 crc kubenswrapper[4774]: E1121 14:25:35.084764 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5\": container with ID starting with e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5 not found: ID does not exist" containerID="e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.084797 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5"} err="failed to get container status \"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5\": rpc error: code = NotFound desc = could not find container \"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5\": container with ID starting with e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.084857 4774 scope.go:117] "RemoveContainer" containerID="acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5" Nov 21 14:25:35 crc kubenswrapper[4774]: E1121 14:25:35.085281 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5\": container with ID starting with acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5 not found: ID does not exist" containerID="acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.085345 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5"} err="failed to get container status \"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5\": rpc error: code = NotFound desc = could not find container \"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5\": container with ID starting with acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: 
I1121 14:25:35.085361 4774 scope.go:117] "RemoveContainer" containerID="963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38" Nov 21 14:25:35 crc kubenswrapper[4774]: E1121 14:25:35.085655 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38\": container with ID starting with 963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38 not found: ID does not exist" containerID="963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.085673 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38"} err="failed to get container status \"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38\": rpc error: code = NotFound desc = could not find container \"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38\": container with ID starting with 963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.085685 4774 scope.go:117] "RemoveContainer" containerID="e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337" Nov 21 14:25:35 crc kubenswrapper[4774]: E1121 14:25:35.086758 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337\": container with ID starting with e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337 not found: ID does not exist" containerID="e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.086782 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337"} err="failed to get container status \"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337\": rpc error: code = NotFound desc = could not find container \"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337\": container with ID starting with e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.086879 4774 scope.go:117] "RemoveContainer" containerID="e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.087078 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5"} err="failed to get container status \"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5\": rpc error: code = NotFound desc = could not find container \"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5\": container with ID starting with e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.087094 4774 scope.go:117] "RemoveContainer" containerID="acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.087406 4774 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5"} err="failed to get container status \"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5\": rpc error: code = NotFound desc = could not find container \"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5\": container with ID starting with acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.087424 4774 scope.go:117] "RemoveContainer" containerID="963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.087685 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38"} err="failed to get container status \"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38\": rpc error: code = NotFound desc = could not find container \"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38\": container with ID starting with 963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.087710 4774 scope.go:117] "RemoveContainer" containerID="e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.087936 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337"} err="failed to get container status \"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337\": rpc error: code = NotFound desc = could not find container \"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337\": container with ID starting with e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.087951 4774 scope.go:117] "RemoveContainer" containerID="e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.088400 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5"} err="failed to get container status \"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5\": rpc error: code = NotFound desc = could not find container \"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5\": container with ID starting with e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.088417 4774 scope.go:117] "RemoveContainer" containerID="acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.089570 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5"} err="failed to get container status \"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5\": rpc error: code = NotFound desc = could not find container \"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5\": container with ID starting with acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5 not found: ID does not exist" Nov 
21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.089593 4774 scope.go:117] "RemoveContainer" containerID="963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.089877 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38"} err="failed to get container status \"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38\": rpc error: code = NotFound desc = could not find container \"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38\": container with ID starting with 963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.089898 4774 scope.go:117] "RemoveContainer" containerID="e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.090222 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337"} err="failed to get container status \"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337\": rpc error: code = NotFound desc = could not find container \"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337\": container with ID starting with e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.090245 4774 scope.go:117] "RemoveContainer" containerID="e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.090529 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5"} err="failed to get container status \"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5\": rpc error: code = NotFound desc = could not find container \"e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5\": container with ID starting with e60c2819f806d24a2bd55ffdb9b8e0cd3512ed2352a4531a08aa72f922afd0d5 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.090545 4774 scope.go:117] "RemoveContainer" containerID="acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.090750 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5"} err="failed to get container status \"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5\": rpc error: code = NotFound desc = could not find container \"acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5\": container with ID starting with acac1b1be04ccb9baaf556b2cc59d0b7e0417608adf33cf80fa2c7338c7fa9b5 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.090765 4774 scope.go:117] "RemoveContainer" containerID="963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.091033 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38"} err="failed to get container status 
\"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38\": rpc error: code = NotFound desc = could not find container \"963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38\": container with ID starting with 963f98b013cdfe83e535304e48f385d56ccb66afec80616ff37c22899c2a6a38 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.091100 4774 scope.go:117] "RemoveContainer" containerID="e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.091371 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337"} err="failed to get container status \"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337\": rpc error: code = NotFound desc = could not find container \"e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337\": container with ID starting with e5eeaeb5183aec7ae2a1f275ad0f3f9072ada4b9b52643f9d75f1c3418151337 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.091391 4774 scope.go:117] "RemoveContainer" containerID="a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.113237 4774 scope.go:117] "RemoveContainer" containerID="a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.133326 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.133697 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/211865db-84d2-4778-b18b-49d80a63b332-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.134812 4774 scope.go:117] "RemoveContainer" containerID="a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0" Nov 21 14:25:35 crc kubenswrapper[4774]: E1121 14:25:35.135737 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0\": container with ID starting with a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0 not found: ID does not exist" containerID="a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.135765 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0"} err="failed to get container status \"a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0\": rpc error: code = NotFound desc = could not find container \"a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0\": container with ID starting with a5dc086a782462940d35eb01773d7251a4a265c92efd9f3ce699dbcf2d630db0 not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.135791 4774 scope.go:117] "RemoveContainer" containerID="a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f" Nov 21 14:25:35 crc kubenswrapper[4774]: E1121 14:25:35.136290 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f\": container with ID starting with a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f not found: ID does not exist" containerID="a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.136321 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f"} err="failed to get container status \"a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f\": rpc error: code = NotFound desc = could not find container \"a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f\": container with ID starting with a4808ec29ebde9682979bc29a00db8b1e66908160879fb0bc22ee1c05b7f1c8f not found: ID does not exist" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.307733 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.327707 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.338214 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.348019 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.363361 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 21 14:25:35 crc kubenswrapper[4774]: E1121 14:25:35.363969 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="proxy-httpd" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.363999 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="proxy-httpd" Nov 21 14:25:35 crc kubenswrapper[4774]: E1121 14:25:35.364024 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="sg-core" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.364031 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="sg-core" Nov 21 14:25:35 crc kubenswrapper[4774]: E1121 14:25:35.364051 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" containerName="cinder-api" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.364059 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" containerName="cinder-api" Nov 21 14:25:35 crc kubenswrapper[4774]: E1121 14:25:35.364090 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="ceilometer-central-agent" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.364096 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="ceilometer-central-agent" Nov 21 14:25:35 crc kubenswrapper[4774]: E1121 14:25:35.364105 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="ceilometer-notification-agent" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.364111 4774 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="ceilometer-notification-agent" Nov 21 14:25:35 crc kubenswrapper[4774]: E1121 14:25:35.364125 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" containerName="cinder-api-log" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.364160 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" containerName="cinder-api-log" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.364333 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="ceilometer-central-agent" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.364349 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="proxy-httpd" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.364361 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" containerName="cinder-api-log" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.364372 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="ceilometer-notification-agent" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.364387 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="211865db-84d2-4778-b18b-49d80a63b332" containerName="sg-core" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.364397 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" containerName="cinder-api" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.365651 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.371113 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.375916 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.376191 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.384469 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.387100 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.391776 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.392477 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.399407 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.421209 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.438545 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-config-data\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.438605 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-scripts\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.438644 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.438670 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.438718 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7bf981c0-8ff6-493c-a5fc-14610df3b362-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.438951 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23d00c9c-1862-49ae-94ec-235c331fc220-log-httpd\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.439015 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lnj2b\" (UniqueName: \"kubernetes.io/projected/23d00c9c-1862-49ae-94ec-235c331fc220-kube-api-access-lnj2b\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.439064 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-config-data\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.439144 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-scripts\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.439256 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkfm5\" (UniqueName: \"kubernetes.io/projected/7bf981c0-8ff6-493c-a5fc-14610df3b362-kube-api-access-bkfm5\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.439346 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-public-tls-certs\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.439404 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23d00c9c-1862-49ae-94ec-235c331fc220-run-httpd\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.439521 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.439552 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bf981c0-8ff6-493c-a5fc-14610df3b362-logs\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.439598 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-config-data-custom\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.439635 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.541901 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7bf981c0-8ff6-493c-a5fc-14610df3b362-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc 
kubenswrapper[4774]: I1121 14:25:35.541970 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23d00c9c-1862-49ae-94ec-235c331fc220-log-httpd\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.541994 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lnj2b\" (UniqueName: \"kubernetes.io/projected/23d00c9c-1862-49ae-94ec-235c331fc220-kube-api-access-lnj2b\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.542023 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-config-data\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.542051 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-scripts\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.542086 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkfm5\" (UniqueName: \"kubernetes.io/projected/7bf981c0-8ff6-493c-a5fc-14610df3b362-kube-api-access-bkfm5\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.542124 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-public-tls-certs\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.542147 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23d00c9c-1862-49ae-94ec-235c331fc220-run-httpd\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.542195 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.542213 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bf981c0-8ff6-493c-a5fc-14610df3b362-logs\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.542240 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-config-data-custom\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc 
kubenswrapper[4774]: I1121 14:25:35.542260 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.542279 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-config-data\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.542300 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-scripts\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.542323 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.542341 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.543514 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23d00c9c-1862-49ae-94ec-235c331fc220-run-httpd\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.544080 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7bf981c0-8ff6-493c-a5fc-14610df3b362-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.544776 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bf981c0-8ff6-493c-a5fc-14610df3b362-logs\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.546490 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-public-tls-certs\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.548259 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.549145 4774 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-scripts\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.549866 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-config-data-custom\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.550120 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.550620 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23d00c9c-1862-49ae-94ec-235c331fc220-log-httpd\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.550761 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-config-data\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.553153 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.553362 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.554217 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-scripts\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.565042 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-config-data\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.568702 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkfm5\" (UniqueName: \"kubernetes.io/projected/7bf981c0-8ff6-493c-a5fc-14610df3b362-kube-api-access-bkfm5\") pod \"cinder-api-0\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.573707 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lnj2b\" 
(UniqueName: \"kubernetes.io/projected/23d00c9c-1862-49ae-94ec-235c331fc220-kube-api-access-lnj2b\") pod \"ceilometer-0\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " pod="openstack/ceilometer-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.690555 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 21 14:25:35 crc kubenswrapper[4774]: I1121 14:25:35.720209 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:36 crc kubenswrapper[4774]: I1121 14:25:36.108703 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="211865db-84d2-4778-b18b-49d80a63b332" path="/var/lib/kubelet/pods/211865db-84d2-4778-b18b-49d80a63b332/volumes" Nov 21 14:25:36 crc kubenswrapper[4774]: I1121 14:25:36.110445 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0c38a76-c3f1-414c-a1bf-d24a37e29ea2" path="/var/lib/kubelet/pods/c0c38a76-c3f1-414c-a1bf-d24a37e29ea2/volumes" Nov 21 14:25:36 crc kubenswrapper[4774]: I1121 14:25:36.217747 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:36 crc kubenswrapper[4774]: I1121 14:25:36.225538 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 21 14:25:36 crc kubenswrapper[4774]: W1121 14:25:36.226295 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23d00c9c_1862_49ae_94ec_235c331fc220.slice/crio-41c1f908c661578a38d5d5b73afd923d3d8fafe7ba4342f57049b10a2fa8a729 WatchSource:0}: Error finding container 41c1f908c661578a38d5d5b73afd923d3d8fafe7ba4342f57049b10a2fa8a729: Status 404 returned error can't find the container with id 41c1f908c661578a38d5d5b73afd923d3d8fafe7ba4342f57049b10a2fa8a729 Nov 21 14:25:36 crc kubenswrapper[4774]: W1121 14:25:36.227303 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7bf981c0_8ff6_493c_a5fc_14610df3b362.slice/crio-94071897eb8abe21f8bb237106f7040b688a521ef846a129b6e26c177a4810e6 WatchSource:0}: Error finding container 94071897eb8abe21f8bb237106f7040b688a521ef846a129b6e26c177a4810e6: Status 404 returned error can't find the container with id 94071897eb8abe21f8bb237106f7040b688a521ef846a129b6e26c177a4810e6 Nov 21 14:25:36 crc kubenswrapper[4774]: I1121 14:25:36.834223 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:25:36 crc kubenswrapper[4774]: I1121 14:25:36.835319 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0171c979-def5-4c7e-8551-cd40d008e88e" containerName="glance-log" containerID="cri-o://d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d" gracePeriod=30 Nov 21 14:25:36 crc kubenswrapper[4774]: I1121 14:25:36.835530 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0171c979-def5-4c7e-8551-cd40d008e88e" containerName="glance-httpd" containerID="cri-o://0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc" gracePeriod=30 Nov 21 14:25:36 crc kubenswrapper[4774]: I1121 14:25:36.994902 4774 generic.go:334] "Generic (PLEG): container finished" podID="0171c979-def5-4c7e-8551-cd40d008e88e" containerID="d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d" 
exitCode=143 Nov 21 14:25:36 crc kubenswrapper[4774]: I1121 14:25:36.995002 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0171c979-def5-4c7e-8551-cd40d008e88e","Type":"ContainerDied","Data":"d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d"} Nov 21 14:25:37 crc kubenswrapper[4774]: I1121 14:25:37.007051 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23d00c9c-1862-49ae-94ec-235c331fc220","Type":"ContainerStarted","Data":"41c1f908c661578a38d5d5b73afd923d3d8fafe7ba4342f57049b10a2fa8a729"} Nov 21 14:25:37 crc kubenswrapper[4774]: I1121 14:25:37.015643 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7bf981c0-8ff6-493c-a5fc-14610df3b362","Type":"ContainerStarted","Data":"400661145f174c0b9169da8eed8077bb5592eced771c3239659552c524ba7eb9"} Nov 21 14:25:37 crc kubenswrapper[4774]: I1121 14:25:37.015703 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7bf981c0-8ff6-493c-a5fc-14610df3b362","Type":"ContainerStarted","Data":"94071897eb8abe21f8bb237106f7040b688a521ef846a129b6e26c177a4810e6"} Nov 21 14:25:38 crc kubenswrapper[4774]: I1121 14:25:38.029500 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7bf981c0-8ff6-493c-a5fc-14610df3b362","Type":"ContainerStarted","Data":"038ee870a823946f43bd1d652272038621a6567ad96155f489796343f86963d7"} Nov 21 14:25:38 crc kubenswrapper[4774]: I1121 14:25:38.030195 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 21 14:25:38 crc kubenswrapper[4774]: I1121 14:25:38.042298 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23d00c9c-1862-49ae-94ec-235c331fc220","Type":"ContainerStarted","Data":"62c1d01045b4f96b759839166bc5cce863a04e117255ade6286803b8c10a5d7e"} Nov 21 14:25:38 crc kubenswrapper[4774]: I1121 14:25:38.042347 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23d00c9c-1862-49ae-94ec-235c331fc220","Type":"ContainerStarted","Data":"7ede4691a038e33a7b99a01d7bb07a6aa4b6971e8064bb3c77ae9c5425a60ccd"} Nov 21 14:25:38 crc kubenswrapper[4774]: I1121 14:25:38.062756 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.062733482 podStartE2EDuration="3.062733482s" podCreationTimestamp="2025-11-21 14:25:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:25:38.052840226 +0000 UTC m=+1328.705039485" watchObservedRunningTime="2025-11-21 14:25:38.062733482 +0000 UTC m=+1328.714932741" Nov 21 14:25:39 crc kubenswrapper[4774]: I1121 14:25:39.007740 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:25:39 crc kubenswrapper[4774]: I1121 14:25:39.008515 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="aecb7435-3e87-4623-ad69-f322836314a3" containerName="glance-log" containerID="cri-o://9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514" gracePeriod=30 Nov 21 14:25:39 crc kubenswrapper[4774]: I1121 14:25:39.009000 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" 
podUID="aecb7435-3e87-4623-ad69-f322836314a3" containerName="glance-httpd" containerID="cri-o://5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa" gracePeriod=30 Nov 21 14:25:39 crc kubenswrapper[4774]: I1121 14:25:39.061410 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23d00c9c-1862-49ae-94ec-235c331fc220","Type":"ContainerStarted","Data":"0c06047f997e502aab0905ee4d6756e05bfabfa41ffbb238eae6650b6598a01c"} Nov 21 14:25:39 crc kubenswrapper[4774]: I1121 14:25:39.791352 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.084354 4774 generic.go:334] "Generic (PLEG): container finished" podID="aecb7435-3e87-4623-ad69-f322836314a3" containerID="9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514" exitCode=143 Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.084419 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"aecb7435-3e87-4623-ad69-f322836314a3","Type":"ContainerDied","Data":"9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514"} Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.261577 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="0171c979-def5-4c7e-8551-cd40d008e88e" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.149:9292/healthcheck\": read tcp 10.217.0.2:48612->10.217.0.149:9292: read: connection reset by peer" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.261636 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="0171c979-def5-4c7e-8551-cd40d008e88e" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.149:9292/healthcheck\": read tcp 10.217.0.2:48628->10.217.0.149:9292: read: connection reset by peer" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.808157 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.858610 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0171c979-def5-4c7e-8551-cd40d008e88e-httpd-run\") pod \"0171c979-def5-4c7e-8551-cd40d008e88e\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.858691 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-combined-ca-bundle\") pod \"0171c979-def5-4c7e-8551-cd40d008e88e\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.858846 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4f5n\" (UniqueName: \"kubernetes.io/projected/0171c979-def5-4c7e-8551-cd40d008e88e-kube-api-access-x4f5n\") pod \"0171c979-def5-4c7e-8551-cd40d008e88e\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.858972 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0171c979-def5-4c7e-8551-cd40d008e88e-logs\") pod \"0171c979-def5-4c7e-8551-cd40d008e88e\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.859006 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"0171c979-def5-4c7e-8551-cd40d008e88e\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.859100 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-public-tls-certs\") pod \"0171c979-def5-4c7e-8551-cd40d008e88e\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.859145 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-config-data\") pod \"0171c979-def5-4c7e-8551-cd40d008e88e\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.859199 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-scripts\") pod \"0171c979-def5-4c7e-8551-cd40d008e88e\" (UID: \"0171c979-def5-4c7e-8551-cd40d008e88e\") " Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.861239 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0171c979-def5-4c7e-8551-cd40d008e88e-logs" (OuterVolumeSpecName: "logs") pod "0171c979-def5-4c7e-8551-cd40d008e88e" (UID: "0171c979-def5-4c7e-8551-cd40d008e88e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.861563 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0171c979-def5-4c7e-8551-cd40d008e88e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0171c979-def5-4c7e-8551-cd40d008e88e" (UID: "0171c979-def5-4c7e-8551-cd40d008e88e"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.867249 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-scripts" (OuterVolumeSpecName: "scripts") pod "0171c979-def5-4c7e-8551-cd40d008e88e" (UID: "0171c979-def5-4c7e-8551-cd40d008e88e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.873068 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0171c979-def5-4c7e-8551-cd40d008e88e-kube-api-access-x4f5n" (OuterVolumeSpecName: "kube-api-access-x4f5n") pod "0171c979-def5-4c7e-8551-cd40d008e88e" (UID: "0171c979-def5-4c7e-8551-cd40d008e88e"). InnerVolumeSpecName "kube-api-access-x4f5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.878726 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "0171c979-def5-4c7e-8551-cd40d008e88e" (UID: "0171c979-def5-4c7e-8551-cd40d008e88e"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.899960 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0171c979-def5-4c7e-8551-cd40d008e88e" (UID: "0171c979-def5-4c7e-8551-cd40d008e88e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.937710 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0171c979-def5-4c7e-8551-cd40d008e88e" (UID: "0171c979-def5-4c7e-8551-cd40d008e88e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.953771 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-config-data" (OuterVolumeSpecName: "config-data") pod "0171c979-def5-4c7e-8551-cd40d008e88e" (UID: "0171c979-def5-4c7e-8551-cd40d008e88e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.962161 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4f5n\" (UniqueName: \"kubernetes.io/projected/0171c979-def5-4c7e-8551-cd40d008e88e-kube-api-access-x4f5n\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.962202 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0171c979-def5-4c7e-8551-cd40d008e88e-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.962241 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.962253 4774 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.962266 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.962274 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.962285 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0171c979-def5-4c7e-8551-cd40d008e88e-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.962295 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0171c979-def5-4c7e-8551-cd40d008e88e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:40 crc kubenswrapper[4774]: I1121 14:25:40.980894 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.064662 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.096036 4774 generic.go:334] "Generic (PLEG): container finished" podID="0171c979-def5-4c7e-8551-cd40d008e88e" containerID="0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc" exitCode=0 Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.096097 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.096111 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0171c979-def5-4c7e-8551-cd40d008e88e","Type":"ContainerDied","Data":"0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc"} Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.096154 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0171c979-def5-4c7e-8551-cd40d008e88e","Type":"ContainerDied","Data":"1ae4a65c111b21a69f52bc649b89dbe7c6443a9ab8b9a9bbd35c9f4df8f8e32b"} Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.096174 4774 scope.go:117] "RemoveContainer" containerID="0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.099957 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23d00c9c-1862-49ae-94ec-235c331fc220","Type":"ContainerStarted","Data":"e0ea84b2a23cdc1ec4c203d02f89a0f6ca75c3c21faf3eb83c3dab086828478a"} Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.100076 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="ceilometer-central-agent" containerID="cri-o://7ede4691a038e33a7b99a01d7bb07a6aa4b6971e8064bb3c77ae9c5425a60ccd" gracePeriod=30 Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.100088 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.100109 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="sg-core" containerID="cri-o://0c06047f997e502aab0905ee4d6756e05bfabfa41ffbb238eae6650b6598a01c" gracePeriod=30 Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.100115 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="ceilometer-notification-agent" containerID="cri-o://62c1d01045b4f96b759839166bc5cce863a04e117255ade6286803b8c10a5d7e" gracePeriod=30 Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.100079 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="proxy-httpd" containerID="cri-o://e0ea84b2a23cdc1ec4c203d02f89a0f6ca75c3c21faf3eb83c3dab086828478a" gracePeriod=30 Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.129353 4774 scope.go:117] "RemoveContainer" containerID="d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.133911 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.228592864 podStartE2EDuration="6.133889223s" podCreationTimestamp="2025-11-21 14:25:35 +0000 UTC" firstStartedPulling="2025-11-21 14:25:36.230924907 +0000 UTC m=+1326.883124166" lastFinishedPulling="2025-11-21 14:25:40.136221266 +0000 UTC m=+1330.788420525" observedRunningTime="2025-11-21 14:25:41.12825336 +0000 UTC m=+1331.780452629" watchObservedRunningTime="2025-11-21 14:25:41.133889223 +0000 UTC m=+1331.786088482" Nov 21 14:25:41 crc 
kubenswrapper[4774]: I1121 14:25:41.173748 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.194973 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.208720 4774 scope.go:117] "RemoveContainer" containerID="0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc" Nov 21 14:25:41 crc kubenswrapper[4774]: E1121 14:25:41.212035 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc\": container with ID starting with 0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc not found: ID does not exist" containerID="0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.212098 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc"} err="failed to get container status \"0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc\": rpc error: code = NotFound desc = could not find container \"0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc\": container with ID starting with 0cb9a9bbd4b0ec1987c62ffdbd5acba1dcd1dbe5195626357b8cb3716ae708cc not found: ID does not exist" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.212141 4774 scope.go:117] "RemoveContainer" containerID="d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d" Nov 21 14:25:41 crc kubenswrapper[4774]: E1121 14:25:41.213398 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d\": container with ID starting with d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d not found: ID does not exist" containerID="d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.213431 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d"} err="failed to get container status \"d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d\": rpc error: code = NotFound desc = could not find container \"d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d\": container with ID starting with d3f27ba7c117cbe3c3664f14462fc39e4fd4911b38cd23276c7659913b0e351d not found: ID does not exist" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.221494 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:25:41 crc kubenswrapper[4774]: E1121 14:25:41.223105 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0171c979-def5-4c7e-8551-cd40d008e88e" containerName="glance-httpd" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.223137 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0171c979-def5-4c7e-8551-cd40d008e88e" containerName="glance-httpd" Nov 21 14:25:41 crc kubenswrapper[4774]: E1121 14:25:41.223176 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0171c979-def5-4c7e-8551-cd40d008e88e" containerName="glance-log" Nov 21 14:25:41 
crc kubenswrapper[4774]: I1121 14:25:41.223185 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0171c979-def5-4c7e-8551-cd40d008e88e" containerName="glance-log" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.223452 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="0171c979-def5-4c7e-8551-cd40d008e88e" containerName="glance-httpd" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.223481 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="0171c979-def5-4c7e-8551-cd40d008e88e" containerName="glance-log" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.226964 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.237130 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.237345 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.264034 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.376329 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.376655 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-config-data\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.376729 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88thd\" (UniqueName: \"kubernetes.io/projected/36597581-6c3f-42a7-98ba-155d3bb19320-kube-api-access-88thd\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.376871 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.376983 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-scripts\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.377039 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.377096 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36597581-6c3f-42a7-98ba-155d3bb19320-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.377140 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36597581-6c3f-42a7-98ba-155d3bb19320-logs\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.478596 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36597581-6c3f-42a7-98ba-155d3bb19320-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.478976 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36597581-6c3f-42a7-98ba-155d3bb19320-logs\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.479050 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.479187 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-config-data\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.479213 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88thd\" (UniqueName: \"kubernetes.io/projected/36597581-6c3f-42a7-98ba-155d3bb19320-kube-api-access-88thd\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.479275 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.479417 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.479477 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.479668 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.482594 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36597581-6c3f-42a7-98ba-155d3bb19320-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.482831 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36597581-6c3f-42a7-98ba-155d3bb19320-logs\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.488519 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-scripts\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.491147 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.501089 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.506021 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-config-data\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.508769 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88thd\" (UniqueName: \"kubernetes.io/projected/36597581-6c3f-42a7-98ba-155d3bb19320-kube-api-access-88thd\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 
14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.526504 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " pod="openstack/glance-default-external-api-0" Nov 21 14:25:41 crc kubenswrapper[4774]: I1121 14:25:41.589430 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.109155 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0171c979-def5-4c7e-8551-cd40d008e88e" path="/var/lib/kubelet/pods/0171c979-def5-4c7e-8551-cd40d008e88e/volumes" Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.117767 4774 generic.go:334] "Generic (PLEG): container finished" podID="23d00c9c-1862-49ae-94ec-235c331fc220" containerID="e0ea84b2a23cdc1ec4c203d02f89a0f6ca75c3c21faf3eb83c3dab086828478a" exitCode=0 Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.117898 4774 generic.go:334] "Generic (PLEG): container finished" podID="23d00c9c-1862-49ae-94ec-235c331fc220" containerID="0c06047f997e502aab0905ee4d6756e05bfabfa41ffbb238eae6650b6598a01c" exitCode=2 Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.117912 4774 generic.go:334] "Generic (PLEG): container finished" podID="23d00c9c-1862-49ae-94ec-235c331fc220" containerID="62c1d01045b4f96b759839166bc5cce863a04e117255ade6286803b8c10a5d7e" exitCode=0 Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.117944 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23d00c9c-1862-49ae-94ec-235c331fc220","Type":"ContainerDied","Data":"e0ea84b2a23cdc1ec4c203d02f89a0f6ca75c3c21faf3eb83c3dab086828478a"} Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.117990 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23d00c9c-1862-49ae-94ec-235c331fc220","Type":"ContainerDied","Data":"0c06047f997e502aab0905ee4d6756e05bfabfa41ffbb238eae6650b6598a01c"} Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.118003 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23d00c9c-1862-49ae-94ec-235c331fc220","Type":"ContainerDied","Data":"62c1d01045b4f96b759839166bc5cce863a04e117255ade6286803b8c10a5d7e"} Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.159181 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="aecb7435-3e87-4623-ad69-f322836314a3" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.150:9292/healthcheck\": read tcp 10.217.0.2:45898->10.217.0.150:9292: read: connection reset by peer" Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.159181 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="aecb7435-3e87-4623-ad69-f322836314a3" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.150:9292/healthcheck\": read tcp 10.217.0.2:45914->10.217.0.150:9292: read: connection reset by peer" Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.226719 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.820345 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.915029 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aecb7435-3e87-4623-ad69-f322836314a3-httpd-run\") pod \"aecb7435-3e87-4623-ad69-f322836314a3\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.915196 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-config-data\") pod \"aecb7435-3e87-4623-ad69-f322836314a3\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.915296 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-internal-tls-certs\") pod \"aecb7435-3e87-4623-ad69-f322836314a3\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.915331 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-scripts\") pod \"aecb7435-3e87-4623-ad69-f322836314a3\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.915460 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aecb7435-3e87-4623-ad69-f322836314a3-logs\") pod \"aecb7435-3e87-4623-ad69-f322836314a3\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.915527 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zj4w\" (UniqueName: \"kubernetes.io/projected/aecb7435-3e87-4623-ad69-f322836314a3-kube-api-access-6zj4w\") pod \"aecb7435-3e87-4623-ad69-f322836314a3\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.915615 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-combined-ca-bundle\") pod \"aecb7435-3e87-4623-ad69-f322836314a3\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.915628 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aecb7435-3e87-4623-ad69-f322836314a3-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "aecb7435-3e87-4623-ad69-f322836314a3" (UID: "aecb7435-3e87-4623-ad69-f322836314a3"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.915653 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"aecb7435-3e87-4623-ad69-f322836314a3\" (UID: \"aecb7435-3e87-4623-ad69-f322836314a3\") " Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.916658 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aecb7435-3e87-4623-ad69-f322836314a3-logs" (OuterVolumeSpecName: "logs") pod "aecb7435-3e87-4623-ad69-f322836314a3" (UID: "aecb7435-3e87-4623-ad69-f322836314a3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.917030 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aecb7435-3e87-4623-ad69-f322836314a3-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.917049 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aecb7435-3e87-4623-ad69-f322836314a3-httpd-run\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.933501 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-scripts" (OuterVolumeSpecName: "scripts") pod "aecb7435-3e87-4623-ad69-f322836314a3" (UID: "aecb7435-3e87-4623-ad69-f322836314a3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.933548 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "aecb7435-3e87-4623-ad69-f322836314a3" (UID: "aecb7435-3e87-4623-ad69-f322836314a3"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.934946 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aecb7435-3e87-4623-ad69-f322836314a3-kube-api-access-6zj4w" (OuterVolumeSpecName: "kube-api-access-6zj4w") pod "aecb7435-3e87-4623-ad69-f322836314a3" (UID: "aecb7435-3e87-4623-ad69-f322836314a3"). InnerVolumeSpecName "kube-api-access-6zj4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:42 crc kubenswrapper[4774]: I1121 14:25:42.960575 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aecb7435-3e87-4623-ad69-f322836314a3" (UID: "aecb7435-3e87-4623-ad69-f322836314a3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.005135 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "aecb7435-3e87-4623-ad69-f322836314a3" (UID: "aecb7435-3e87-4623-ad69-f322836314a3"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.019200 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.019247 4774 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.019265 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.019277 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zj4w\" (UniqueName: \"kubernetes.io/projected/aecb7435-3e87-4623-ad69-f322836314a3-kube-api-access-6zj4w\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.019294 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.030092 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-config-data" (OuterVolumeSpecName: "config-data") pod "aecb7435-3e87-4623-ad69-f322836314a3" (UID: "aecb7435-3e87-4623-ad69-f322836314a3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.049185 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.129669 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aecb7435-3e87-4623-ad69-f322836314a3-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.130287 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.140573 4774 generic.go:334] "Generic (PLEG): container finished" podID="aecb7435-3e87-4623-ad69-f322836314a3" containerID="5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa" exitCode=0 Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.140676 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"aecb7435-3e87-4623-ad69-f322836314a3","Type":"ContainerDied","Data":"5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa"} Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.140715 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"aecb7435-3e87-4623-ad69-f322836314a3","Type":"ContainerDied","Data":"c60e0e2635bc551e26a5ae76aa252b3d39377a808c32413334fadefa0ac388a9"} Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.140712 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.140778 4774 scope.go:117] "RemoveContainer" containerID="5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.153459 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"36597581-6c3f-42a7-98ba-155d3bb19320","Type":"ContainerStarted","Data":"cf5874a27369c23eb02a38e945173e313d37d5eea273de5528c6e3f4c20042c1"} Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.153556 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"36597581-6c3f-42a7-98ba-155d3bb19320","Type":"ContainerStarted","Data":"def4a4ed1a20cd4df9962234216a7423ad43403aca5dc81f3f3b8c1accd42945"} Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.289017 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.303935 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.312074 4774 scope.go:117] "RemoveContainer" containerID="9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.315389 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:25:43 crc kubenswrapper[4774]: E1121 14:25:43.316534 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aecb7435-3e87-4623-ad69-f322836314a3" containerName="glance-httpd" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.316557 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="aecb7435-3e87-4623-ad69-f322836314a3" containerName="glance-httpd" Nov 21 14:25:43 crc kubenswrapper[4774]: E1121 14:25:43.316574 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aecb7435-3e87-4623-ad69-f322836314a3" containerName="glance-log" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.316582 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="aecb7435-3e87-4623-ad69-f322836314a3" containerName="glance-log" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.316868 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="aecb7435-3e87-4623-ad69-f322836314a3" containerName="glance-log" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.316885 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="aecb7435-3e87-4623-ad69-f322836314a3" containerName="glance-httpd" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.318850 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.325358 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.325600 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.327619 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.407033 4774 scope.go:117] "RemoveContainer" containerID="5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa" Nov 21 14:25:43 crc kubenswrapper[4774]: E1121 14:25:43.407928 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa\": container with ID starting with 5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa not found: ID does not exist" containerID="5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.407971 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa"} err="failed to get container status \"5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa\": rpc error: code = NotFound desc = could not find container \"5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa\": container with ID starting with 5ab3d46fa940996249265de97c79ffffa02872184ba84982b811ac0846d2d0fa not found: ID does not exist" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.409632 4774 scope.go:117] "RemoveContainer" containerID="9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514" Nov 21 14:25:43 crc kubenswrapper[4774]: E1121 14:25:43.410366 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514\": container with ID starting with 9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514 not found: ID does not exist" containerID="9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.410456 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514"} err="failed to get container status \"9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514\": rpc error: code = NotFound desc = could not find container \"9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514\": container with ID starting with 9786d4d02748d2ff6d130724654c2463fa8f17ea8869ab8cdc1dce5bd64aa514 not found: ID does not exist" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.435617 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.435699 4774 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.435803 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29fd4802-19c7-4e11-b776-c505c03206b0-logs\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.435910 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.435941 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pg5z\" (UniqueName: \"kubernetes.io/projected/29fd4802-19c7-4e11-b776-c505c03206b0-kube-api-access-2pg5z\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.435984 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.436035 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/29fd4802-19c7-4e11-b776-c505c03206b0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.436134 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.537528 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pg5z\" (UniqueName: \"kubernetes.io/projected/29fd4802-19c7-4e11-b776-c505c03206b0-kube-api-access-2pg5z\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.537587 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 
crc kubenswrapper[4774]: I1121 14:25:43.537633 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/29fd4802-19c7-4e11-b776-c505c03206b0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.537671 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.537692 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.537712 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.537774 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29fd4802-19c7-4e11-b776-c505c03206b0-logs\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.537853 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.538257 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.538267 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/29fd4802-19c7-4e11-b776-c505c03206b0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.538407 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29fd4802-19c7-4e11-b776-c505c03206b0-logs\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.542663 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.542752 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.544483 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.544688 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.557100 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pg5z\" (UniqueName: \"kubernetes.io/projected/29fd4802-19c7-4e11-b776-c505c03206b0-kube-api-access-2pg5z\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.578050 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") " pod="openstack/glance-default-internal-api-0" Nov 21 14:25:43 crc kubenswrapper[4774]: I1121 14:25:43.698091 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:44 crc kubenswrapper[4774]: I1121 14:25:44.105596 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aecb7435-3e87-4623-ad69-f322836314a3" path="/var/lib/kubelet/pods/aecb7435-3e87-4623-ad69-f322836314a3/volumes" Nov 21 14:25:44 crc kubenswrapper[4774]: I1121 14:25:44.166812 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"36597581-6c3f-42a7-98ba-155d3bb19320","Type":"ContainerStarted","Data":"c3d2080c4d1517a927737cdfe470200b33ed1dfc064dd0c21a2afa217e1ea935"} Nov 21 14:25:44 crc kubenswrapper[4774]: I1121 14:25:44.198858 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.198798803 podStartE2EDuration="3.198798803s" podCreationTimestamp="2025-11-21 14:25:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:25:44.190469292 +0000 UTC m=+1334.842668551" watchObservedRunningTime="2025-11-21 14:25:44.198798803 +0000 UTC m=+1334.850998072" Nov 21 14:25:44 crc kubenswrapper[4774]: I1121 14:25:44.270914 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.186466 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"29fd4802-19c7-4e11-b776-c505c03206b0","Type":"ContainerStarted","Data":"66249ecbe2c3348c6acd48e9804c896c943d2119544945a4641b3cd22603525d"} Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.187037 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"29fd4802-19c7-4e11-b776-c505c03206b0","Type":"ContainerStarted","Data":"d41c483404d04ea1443410bd3c7d1ec1118de9dd5dec53ef7c7720dc6b26add8"} Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.193651 4774 generic.go:334] "Generic (PLEG): container finished" podID="23d00c9c-1862-49ae-94ec-235c331fc220" containerID="7ede4691a038e33a7b99a01d7bb07a6aa4b6971e8064bb3c77ae9c5425a60ccd" exitCode=0 Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.194961 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23d00c9c-1862-49ae-94ec-235c331fc220","Type":"ContainerDied","Data":"7ede4691a038e33a7b99a01d7bb07a6aa4b6971e8064bb3c77ae9c5425a60ccd"} Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.265679 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.381608 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-sg-core-conf-yaml\") pod \"23d00c9c-1862-49ae-94ec-235c331fc220\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.381690 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23d00c9c-1862-49ae-94ec-235c331fc220-run-httpd\") pod \"23d00c9c-1862-49ae-94ec-235c331fc220\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.381722 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-combined-ca-bundle\") pod \"23d00c9c-1862-49ae-94ec-235c331fc220\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.381885 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-config-data\") pod \"23d00c9c-1862-49ae-94ec-235c331fc220\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.381961 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23d00c9c-1862-49ae-94ec-235c331fc220-log-httpd\") pod \"23d00c9c-1862-49ae-94ec-235c331fc220\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.382030 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lnj2b\" (UniqueName: \"kubernetes.io/projected/23d00c9c-1862-49ae-94ec-235c331fc220-kube-api-access-lnj2b\") pod \"23d00c9c-1862-49ae-94ec-235c331fc220\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.382080 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-scripts\") pod \"23d00c9c-1862-49ae-94ec-235c331fc220\" (UID: \"23d00c9c-1862-49ae-94ec-235c331fc220\") " Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.382642 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23d00c9c-1862-49ae-94ec-235c331fc220-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "23d00c9c-1862-49ae-94ec-235c331fc220" (UID: "23d00c9c-1862-49ae-94ec-235c331fc220"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.383210 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23d00c9c-1862-49ae-94ec-235c331fc220-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "23d00c9c-1862-49ae-94ec-235c331fc220" (UID: "23d00c9c-1862-49ae-94ec-235c331fc220"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.394524 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23d00c9c-1862-49ae-94ec-235c331fc220-kube-api-access-lnj2b" (OuterVolumeSpecName: "kube-api-access-lnj2b") pod "23d00c9c-1862-49ae-94ec-235c331fc220" (UID: "23d00c9c-1862-49ae-94ec-235c331fc220"). InnerVolumeSpecName "kube-api-access-lnj2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.399991 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-scripts" (OuterVolumeSpecName: "scripts") pod "23d00c9c-1862-49ae-94ec-235c331fc220" (UID: "23d00c9c-1862-49ae-94ec-235c331fc220"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.421055 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "23d00c9c-1862-49ae-94ec-235c331fc220" (UID: "23d00c9c-1862-49ae-94ec-235c331fc220"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.486126 4774 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.486166 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23d00c9c-1862-49ae-94ec-235c331fc220-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.486183 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/23d00c9c-1862-49ae-94ec-235c331fc220-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.486194 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lnj2b\" (UniqueName: \"kubernetes.io/projected/23d00c9c-1862-49ae-94ec-235c331fc220-kube-api-access-lnj2b\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.486210 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.494499 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23d00c9c-1862-49ae-94ec-235c331fc220" (UID: "23d00c9c-1862-49ae-94ec-235c331fc220"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.530447 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-config-data" (OuterVolumeSpecName: "config-data") pod "23d00c9c-1862-49ae-94ec-235c331fc220" (UID: "23d00c9c-1862-49ae-94ec-235c331fc220"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.595445 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:45 crc kubenswrapper[4774]: I1121 14:25:45.595488 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23d00c9c-1862-49ae-94ec-235c331fc220-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.204964 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"29fd4802-19c7-4e11-b776-c505c03206b0","Type":"ContainerStarted","Data":"08dcf92110aca28bb33e09d2cf80555b027cc58cf28e0ba6099d79517b3e3e96"} Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.208894 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"23d00c9c-1862-49ae-94ec-235c331fc220","Type":"ContainerDied","Data":"41c1f908c661578a38d5d5b73afd923d3d8fafe7ba4342f57049b10a2fa8a729"} Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.208954 4774 scope.go:117] "RemoveContainer" containerID="e0ea84b2a23cdc1ec4c203d02f89a0f6ca75c3c21faf3eb83c3dab086828478a" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.208987 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.233034 4774 scope.go:117] "RemoveContainer" containerID="0c06047f997e502aab0905ee4d6756e05bfabfa41ffbb238eae6650b6598a01c" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.239481 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.239444827 podStartE2EDuration="3.239444827s" podCreationTimestamp="2025-11-21 14:25:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:25:46.230196189 +0000 UTC m=+1336.882395448" watchObservedRunningTime="2025-11-21 14:25:46.239444827 +0000 UTC m=+1336.891644086" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.255672 4774 scope.go:117] "RemoveContainer" containerID="62c1d01045b4f96b759839166bc5cce863a04e117255ade6286803b8c10a5d7e" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.262846 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.279996 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.287039 4774 scope.go:117] "RemoveContainer" containerID="7ede4691a038e33a7b99a01d7bb07a6aa4b6971e8064bb3c77ae9c5425a60ccd" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.297096 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:46 crc kubenswrapper[4774]: E1121 14:25:46.297622 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="proxy-httpd" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.297639 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="proxy-httpd" Nov 21 14:25:46 crc kubenswrapper[4774]: E1121 14:25:46.297651 
4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="sg-core" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.297656 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="sg-core" Nov 21 14:25:46 crc kubenswrapper[4774]: E1121 14:25:46.297684 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="ceilometer-notification-agent" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.297690 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="ceilometer-notification-agent" Nov 21 14:25:46 crc kubenswrapper[4774]: E1121 14:25:46.297709 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="ceilometer-central-agent" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.297715 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="ceilometer-central-agent" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.297938 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="sg-core" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.297951 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="proxy-httpd" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.297964 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="ceilometer-notification-agent" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.297985 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" containerName="ceilometer-central-agent" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.299779 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.307648 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.308512 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.309615 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.410613 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-scripts\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.410692 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r92hc\" (UniqueName: \"kubernetes.io/projected/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-kube-api-access-r92hc\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.410732 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.410962 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.411046 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-run-httpd\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.411185 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-config-data\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.411231 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-log-httpd\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.513430 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 
14:25:46.513506 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-run-httpd\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.513558 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-config-data\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.513581 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-log-httpd\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.513641 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-scripts\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.513683 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r92hc\" (UniqueName: \"kubernetes.io/projected/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-kube-api-access-r92hc\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.513712 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.514231 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-log-httpd\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.514893 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-run-httpd\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.519808 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.520141 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-config-data\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.520460 4774 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-scripts\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.536811 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.540428 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r92hc\" (UniqueName: \"kubernetes.io/projected/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-kube-api-access-r92hc\") pod \"ceilometer-0\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.553752 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-d6wf2"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.555415 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-d6wf2" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.565032 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-d6wf2"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.616076 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6360ba89-8432-49d8-b5ea-97a52784ea66-operator-scripts\") pod \"nova-api-db-create-d6wf2\" (UID: \"6360ba89-8432-49d8-b5ea-97a52784ea66\") " pod="openstack/nova-api-db-create-d6wf2" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.618470 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfznz\" (UniqueName: \"kubernetes.io/projected/6360ba89-8432-49d8-b5ea-97a52784ea66-kube-api-access-rfznz\") pod \"nova-api-db-create-d6wf2\" (UID: \"6360ba89-8432-49d8-b5ea-97a52784ea66\") " pod="openstack/nova-api-db-create-d6wf2" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.629583 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.653837 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-7vg67"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.655417 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-7vg67" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.665442 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-7vg67"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.720423 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89db7\" (UniqueName: \"kubernetes.io/projected/46518ae6-7502-4276-8b86-58e85eff4951-kube-api-access-89db7\") pod \"nova-cell0-db-create-7vg67\" (UID: \"46518ae6-7502-4276-8b86-58e85eff4951\") " pod="openstack/nova-cell0-db-create-7vg67" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.720855 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6360ba89-8432-49d8-b5ea-97a52784ea66-operator-scripts\") pod \"nova-api-db-create-d6wf2\" (UID: \"6360ba89-8432-49d8-b5ea-97a52784ea66\") " pod="openstack/nova-api-db-create-d6wf2" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.721024 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfznz\" (UniqueName: \"kubernetes.io/projected/6360ba89-8432-49d8-b5ea-97a52784ea66-kube-api-access-rfznz\") pod \"nova-api-db-create-d6wf2\" (UID: \"6360ba89-8432-49d8-b5ea-97a52784ea66\") " pod="openstack/nova-api-db-create-d6wf2" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.721622 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46518ae6-7502-4276-8b86-58e85eff4951-operator-scripts\") pod \"nova-cell0-db-create-7vg67\" (UID: \"46518ae6-7502-4276-8b86-58e85eff4951\") " pod="openstack/nova-cell0-db-create-7vg67" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.723174 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6360ba89-8432-49d8-b5ea-97a52784ea66-operator-scripts\") pod \"nova-api-db-create-d6wf2\" (UID: \"6360ba89-8432-49d8-b5ea-97a52784ea66\") " pod="openstack/nova-api-db-create-d6wf2" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.757449 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfznz\" (UniqueName: \"kubernetes.io/projected/6360ba89-8432-49d8-b5ea-97a52784ea66-kube-api-access-rfznz\") pod \"nova-api-db-create-d6wf2\" (UID: \"6360ba89-8432-49d8-b5ea-97a52784ea66\") " pod="openstack/nova-api-db-create-d6wf2" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.770068 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-zgkzk"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.771745 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-zgkzk" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.785884 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-242e-account-create-84s8b"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.787323 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-242e-account-create-84s8b" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.791641 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.794158 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-zgkzk"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.803344 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-242e-account-create-84s8b"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.826902 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n55rc\" (UniqueName: \"kubernetes.io/projected/f30db86a-e756-4f9a-9691-1642d9678687-kube-api-access-n55rc\") pod \"nova-cell1-db-create-zgkzk\" (UID: \"f30db86a-e756-4f9a-9691-1642d9678687\") " pod="openstack/nova-cell1-db-create-zgkzk" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.826980 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46518ae6-7502-4276-8b86-58e85eff4951-operator-scripts\") pod \"nova-cell0-db-create-7vg67\" (UID: \"46518ae6-7502-4276-8b86-58e85eff4951\") " pod="openstack/nova-cell0-db-create-7vg67" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.827001 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xvq8\" (UniqueName: \"kubernetes.io/projected/6513cfab-0b30-4103-8e71-3492d2013657-kube-api-access-2xvq8\") pod \"nova-api-242e-account-create-84s8b\" (UID: \"6513cfab-0b30-4103-8e71-3492d2013657\") " pod="openstack/nova-api-242e-account-create-84s8b" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.831075 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89db7\" (UniqueName: \"kubernetes.io/projected/46518ae6-7502-4276-8b86-58e85eff4951-kube-api-access-89db7\") pod \"nova-cell0-db-create-7vg67\" (UID: \"46518ae6-7502-4276-8b86-58e85eff4951\") " pod="openstack/nova-cell0-db-create-7vg67" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.831144 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6513cfab-0b30-4103-8e71-3492d2013657-operator-scripts\") pod \"nova-api-242e-account-create-84s8b\" (UID: \"6513cfab-0b30-4103-8e71-3492d2013657\") " pod="openstack/nova-api-242e-account-create-84s8b" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.831188 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f30db86a-e756-4f9a-9691-1642d9678687-operator-scripts\") pod \"nova-cell1-db-create-zgkzk\" (UID: \"f30db86a-e756-4f9a-9691-1642d9678687\") " pod="openstack/nova-cell1-db-create-zgkzk" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.832238 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46518ae6-7502-4276-8b86-58e85eff4951-operator-scripts\") pod \"nova-cell0-db-create-7vg67\" (UID: \"46518ae6-7502-4276-8b86-58e85eff4951\") " pod="openstack/nova-cell0-db-create-7vg67" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.853107 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-89db7\" (UniqueName: \"kubernetes.io/projected/46518ae6-7502-4276-8b86-58e85eff4951-kube-api-access-89db7\") pod \"nova-cell0-db-create-7vg67\" (UID: \"46518ae6-7502-4276-8b86-58e85eff4951\") " pod="openstack/nova-cell0-db-create-7vg67" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.933496 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n55rc\" (UniqueName: \"kubernetes.io/projected/f30db86a-e756-4f9a-9691-1642d9678687-kube-api-access-n55rc\") pod \"nova-cell1-db-create-zgkzk\" (UID: \"f30db86a-e756-4f9a-9691-1642d9678687\") " pod="openstack/nova-cell1-db-create-zgkzk" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.933583 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xvq8\" (UniqueName: \"kubernetes.io/projected/6513cfab-0b30-4103-8e71-3492d2013657-kube-api-access-2xvq8\") pod \"nova-api-242e-account-create-84s8b\" (UID: \"6513cfab-0b30-4103-8e71-3492d2013657\") " pod="openstack/nova-api-242e-account-create-84s8b" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.933669 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6513cfab-0b30-4103-8e71-3492d2013657-operator-scripts\") pod \"nova-api-242e-account-create-84s8b\" (UID: \"6513cfab-0b30-4103-8e71-3492d2013657\") " pod="openstack/nova-api-242e-account-create-84s8b" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.933697 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f30db86a-e756-4f9a-9691-1642d9678687-operator-scripts\") pod \"nova-cell1-db-create-zgkzk\" (UID: \"f30db86a-e756-4f9a-9691-1642d9678687\") " pod="openstack/nova-cell1-db-create-zgkzk" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.935102 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f30db86a-e756-4f9a-9691-1642d9678687-operator-scripts\") pod \"nova-cell1-db-create-zgkzk\" (UID: \"f30db86a-e756-4f9a-9691-1642d9678687\") " pod="openstack/nova-cell1-db-create-zgkzk" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.937260 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-d6wf2" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.938806 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6513cfab-0b30-4103-8e71-3492d2013657-operator-scripts\") pod \"nova-api-242e-account-create-84s8b\" (UID: \"6513cfab-0b30-4103-8e71-3492d2013657\") " pod="openstack/nova-api-242e-account-create-84s8b" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.964088 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-3102-account-create-g4bl5"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.966361 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2xvq8\" (UniqueName: \"kubernetes.io/projected/6513cfab-0b30-4103-8e71-3492d2013657-kube-api-access-2xvq8\") pod \"nova-api-242e-account-create-84s8b\" (UID: \"6513cfab-0b30-4103-8e71-3492d2013657\") " pod="openstack/nova-api-242e-account-create-84s8b" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.968213 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n55rc\" (UniqueName: \"kubernetes.io/projected/f30db86a-e756-4f9a-9691-1642d9678687-kube-api-access-n55rc\") pod \"nova-cell1-db-create-zgkzk\" (UID: \"f30db86a-e756-4f9a-9691-1642d9678687\") " pod="openstack/nova-cell1-db-create-zgkzk" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.969648 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3102-account-create-g4bl5" Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.970647 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-3102-account-create-g4bl5"] Nov 21 14:25:46 crc kubenswrapper[4774]: I1121 14:25:46.972648 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.037024 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8nj5\" (UniqueName: \"kubernetes.io/projected/2969283f-bdf3-4a7c-88c3-04e0b009a6b9-kube-api-access-t8nj5\") pod \"nova-cell0-3102-account-create-g4bl5\" (UID: \"2969283f-bdf3-4a7c-88c3-04e0b009a6b9\") " pod="openstack/nova-cell0-3102-account-create-g4bl5" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.037511 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2969283f-bdf3-4a7c-88c3-04e0b009a6b9-operator-scripts\") pod \"nova-cell0-3102-account-create-g4bl5\" (UID: \"2969283f-bdf3-4a7c-88c3-04e0b009a6b9\") " pod="openstack/nova-cell0-3102-account-create-g4bl5" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.059752 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-7vg67" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.070280 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.102020 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-zgkzk" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.119453 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-242e-account-create-84s8b" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.143480 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2969283f-bdf3-4a7c-88c3-04e0b009a6b9-operator-scripts\") pod \"nova-cell0-3102-account-create-g4bl5\" (UID: \"2969283f-bdf3-4a7c-88c3-04e0b009a6b9\") " pod="openstack/nova-cell0-3102-account-create-g4bl5" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.143642 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8nj5\" (UniqueName: \"kubernetes.io/projected/2969283f-bdf3-4a7c-88c3-04e0b009a6b9-kube-api-access-t8nj5\") pod \"nova-cell0-3102-account-create-g4bl5\" (UID: \"2969283f-bdf3-4a7c-88c3-04e0b009a6b9\") " pod="openstack/nova-cell0-3102-account-create-g4bl5" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.145566 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2969283f-bdf3-4a7c-88c3-04e0b009a6b9-operator-scripts\") pod \"nova-cell0-3102-account-create-g4bl5\" (UID: \"2969283f-bdf3-4a7c-88c3-04e0b009a6b9\") " pod="openstack/nova-cell0-3102-account-create-g4bl5" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.175760 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-a40a-account-create-bmj2k"] Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.177679 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-a40a-account-create-bmj2k" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.182140 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.183471 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-a40a-account-create-bmj2k"] Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.187891 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8nj5\" (UniqueName: \"kubernetes.io/projected/2969283f-bdf3-4a7c-88c3-04e0b009a6b9-kube-api-access-t8nj5\") pod \"nova-cell0-3102-account-create-g4bl5\" (UID: \"2969283f-bdf3-4a7c-88c3-04e0b009a6b9\") " pod="openstack/nova-cell0-3102-account-create-g4bl5" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.245699 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wj7c8\" (UniqueName: \"kubernetes.io/projected/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3-kube-api-access-wj7c8\") pod \"nova-cell1-a40a-account-create-bmj2k\" (UID: \"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3\") " pod="openstack/nova-cell1-a40a-account-create-bmj2k" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.247121 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3-operator-scripts\") pod \"nova-cell1-a40a-account-create-bmj2k\" (UID: \"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3\") " pod="openstack/nova-cell1-a40a-account-create-bmj2k" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.245730 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.296449 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-3102-account-create-g4bl5" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.358507 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wj7c8\" (UniqueName: \"kubernetes.io/projected/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3-kube-api-access-wj7c8\") pod \"nova-cell1-a40a-account-create-bmj2k\" (UID: \"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3\") " pod="openstack/nova-cell1-a40a-account-create-bmj2k" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.359022 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3-operator-scripts\") pod \"nova-cell1-a40a-account-create-bmj2k\" (UID: \"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3\") " pod="openstack/nova-cell1-a40a-account-create-bmj2k" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.360305 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3-operator-scripts\") pod \"nova-cell1-a40a-account-create-bmj2k\" (UID: \"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3\") " pod="openstack/nova-cell1-a40a-account-create-bmj2k" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.381403 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wj7c8\" (UniqueName: \"kubernetes.io/projected/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3-kube-api-access-wj7c8\") pod \"nova-cell1-a40a-account-create-bmj2k\" (UID: \"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3\") " pod="openstack/nova-cell1-a40a-account-create-bmj2k" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.521348 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-a40a-account-create-bmj2k" Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.567134 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-d6wf2"] Nov 21 14:25:47 crc kubenswrapper[4774]: I1121 14:25:47.857241 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-7vg67"] Nov 21 14:25:47 crc kubenswrapper[4774]: W1121 14:25:47.868380 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46518ae6_7502_4276_8b86_58e85eff4951.slice/crio-678083493eaa23d6a7f2b80d00e187773e3ac91c7c2b4608cf09b8d158274b05 WatchSource:0}: Error finding container 678083493eaa23d6a7f2b80d00e187773e3ac91c7c2b4608cf09b8d158274b05: Status 404 returned error can't find the container with id 678083493eaa23d6a7f2b80d00e187773e3ac91c7c2b4608cf09b8d158274b05 Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.000911 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-242e-account-create-84s8b"] Nov 21 14:25:48 crc kubenswrapper[4774]: W1121 14:25:48.005695 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6513cfab_0b30_4103_8e71_3492d2013657.slice/crio-f59a923432f5d0b2094e9c3320d7992eaf763d2df635c0038f28f4fbadc3a81f WatchSource:0}: Error finding container f59a923432f5d0b2094e9c3320d7992eaf763d2df635c0038f28f4fbadc3a81f: Status 404 returned error can't find the container with id f59a923432f5d0b2094e9c3320d7992eaf763d2df635c0038f28f4fbadc3a81f Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.015812 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-zgkzk"] Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.078172 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-a40a-account-create-bmj2k"] Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.146937 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23d00c9c-1862-49ae-94ec-235c331fc220" path="/var/lib/kubelet/pods/23d00c9c-1862-49ae-94ec-235c331fc220/volumes" Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.149728 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-3102-account-create-g4bl5"] Nov 21 14:25:48 crc kubenswrapper[4774]: W1121 14:25:48.166198 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2969283f_bdf3_4a7c_88c3_04e0b009a6b9.slice/crio-3bf6945b35d8f78da60cf69f7a0157d2b82448c06db3b249d802fed615e4f81b WatchSource:0}: Error finding container 3bf6945b35d8f78da60cf69f7a0157d2b82448c06db3b249d802fed615e4f81b: Status 404 returned error can't find the container with id 3bf6945b35d8f78da60cf69f7a0157d2b82448c06db3b249d802fed615e4f81b Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.267169 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-a40a-account-create-bmj2k" event={"ID":"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3","Type":"ContainerStarted","Data":"1765fe352e317d84679f8901993a46390d1eef35b35ed6bcde17c3c97d931fd1"} Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.276683 4774 generic.go:334] "Generic (PLEG): container finished" podID="6360ba89-8432-49d8-b5ea-97a52784ea66" containerID="60d2bd1d86a69536d7cbb5a80d6478c043cbf386394ef9477d5c5f4ac5d413ce" exitCode=0 Nov 21 14:25:48 crc 
kubenswrapper[4774]: I1121 14:25:48.277276 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-d6wf2" event={"ID":"6360ba89-8432-49d8-b5ea-97a52784ea66","Type":"ContainerDied","Data":"60d2bd1d86a69536d7cbb5a80d6478c043cbf386394ef9477d5c5f4ac5d413ce"} Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.277311 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-d6wf2" event={"ID":"6360ba89-8432-49d8-b5ea-97a52784ea66","Type":"ContainerStarted","Data":"9911c20f90a9db93914fe0e510a3671aff337bf18aba9ac9ecc7284b31e6a35e"} Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.282710 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-242e-account-create-84s8b" event={"ID":"6513cfab-0b30-4103-8e71-3492d2013657","Type":"ContainerStarted","Data":"f59a923432f5d0b2094e9c3320d7992eaf763d2df635c0038f28f4fbadc3a81f"} Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.288069 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38","Type":"ContainerStarted","Data":"88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b"} Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.288290 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38","Type":"ContainerStarted","Data":"586f5a5ae5732bbbc3b6e1b5371a370cbda75b7714403e2793d1d093e118d0b7"} Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.309175 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3102-account-create-g4bl5" event={"ID":"2969283f-bdf3-4a7c-88c3-04e0b009a6b9","Type":"ContainerStarted","Data":"3bf6945b35d8f78da60cf69f7a0157d2b82448c06db3b249d802fed615e4f81b"} Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.313510 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-7vg67" event={"ID":"46518ae6-7502-4276-8b86-58e85eff4951","Type":"ContainerStarted","Data":"8359f311c5a169817e14ceb2a2bd6f2047c46e90f7fcf834e5f7b4e695b44023"} Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.313719 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-7vg67" event={"ID":"46518ae6-7502-4276-8b86-58e85eff4951","Type":"ContainerStarted","Data":"678083493eaa23d6a7f2b80d00e187773e3ac91c7c2b4608cf09b8d158274b05"} Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.315469 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zgkzk" event={"ID":"f30db86a-e756-4f9a-9691-1642d9678687","Type":"ContainerStarted","Data":"1da85c342e2c6fcb2d65a23a9152d1f160fb513b6a5b9ba5e8f91c49ffe8cfbc"} Nov 21 14:25:48 crc kubenswrapper[4774]: I1121 14:25:48.563941 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.328087 4774 generic.go:334] "Generic (PLEG): container finished" podID="f30db86a-e756-4f9a-9691-1642d9678687" containerID="d4666e8da93a9918577e1748e849a82b0f153998843e0e0d13b0cf184d7b9c3e" exitCode=0 Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.328202 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zgkzk" event={"ID":"f30db86a-e756-4f9a-9691-1642d9678687","Type":"ContainerDied","Data":"d4666e8da93a9918577e1748e849a82b0f153998843e0e0d13b0cf184d7b9c3e"} Nov 21 14:25:49 crc kubenswrapper[4774]: 
I1121 14:25:49.336794 4774 generic.go:334] "Generic (PLEG): container finished" podID="7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3" containerID="b8d03bb1b95b5563b9e17389f6912b365ce1253044fc2b37f70c9d63eb743194" exitCode=0 Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.336894 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-a40a-account-create-bmj2k" event={"ID":"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3","Type":"ContainerDied","Data":"b8d03bb1b95b5563b9e17389f6912b365ce1253044fc2b37f70c9d63eb743194"} Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.339220 4774 generic.go:334] "Generic (PLEG): container finished" podID="6513cfab-0b30-4103-8e71-3492d2013657" containerID="0b5271262c3c849a993c33ce298b38eb7c992e1d7d39bbd4ae89cc26488705d8" exitCode=0 Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.339329 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-242e-account-create-84s8b" event={"ID":"6513cfab-0b30-4103-8e71-3492d2013657","Type":"ContainerDied","Data":"0b5271262c3c849a993c33ce298b38eb7c992e1d7d39bbd4ae89cc26488705d8"} Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.342340 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38","Type":"ContainerStarted","Data":"5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae"} Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.347449 4774 generic.go:334] "Generic (PLEG): container finished" podID="2969283f-bdf3-4a7c-88c3-04e0b009a6b9" containerID="0b0a5c5f9e19b8e02291c0c54cddb5779d88fddf73c5f3de122740b8608b1067" exitCode=0 Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.347550 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3102-account-create-g4bl5" event={"ID":"2969283f-bdf3-4a7c-88c3-04e0b009a6b9","Type":"ContainerDied","Data":"0b0a5c5f9e19b8e02291c0c54cddb5779d88fddf73c5f3de122740b8608b1067"} Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.350806 4774 generic.go:334] "Generic (PLEG): container finished" podID="46518ae6-7502-4276-8b86-58e85eff4951" containerID="8359f311c5a169817e14ceb2a2bd6f2047c46e90f7fcf834e5f7b4e695b44023" exitCode=0 Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.351304 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-7vg67" event={"ID":"46518ae6-7502-4276-8b86-58e85eff4951","Type":"ContainerDied","Data":"8359f311c5a169817e14ceb2a2bd6f2047c46e90f7fcf834e5f7b4e695b44023"} Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.845722 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-d6wf2" Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.851640 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-7vg67" Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.943276 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6360ba89-8432-49d8-b5ea-97a52784ea66-operator-scripts\") pod \"6360ba89-8432-49d8-b5ea-97a52784ea66\" (UID: \"6360ba89-8432-49d8-b5ea-97a52784ea66\") " Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.943411 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89db7\" (UniqueName: \"kubernetes.io/projected/46518ae6-7502-4276-8b86-58e85eff4951-kube-api-access-89db7\") pod \"46518ae6-7502-4276-8b86-58e85eff4951\" (UID: \"46518ae6-7502-4276-8b86-58e85eff4951\") " Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.943572 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfznz\" (UniqueName: \"kubernetes.io/projected/6360ba89-8432-49d8-b5ea-97a52784ea66-kube-api-access-rfznz\") pod \"6360ba89-8432-49d8-b5ea-97a52784ea66\" (UID: \"6360ba89-8432-49d8-b5ea-97a52784ea66\") " Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.943809 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46518ae6-7502-4276-8b86-58e85eff4951-operator-scripts\") pod \"46518ae6-7502-4276-8b86-58e85eff4951\" (UID: \"46518ae6-7502-4276-8b86-58e85eff4951\") " Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.945669 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46518ae6-7502-4276-8b86-58e85eff4951-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "46518ae6-7502-4276-8b86-58e85eff4951" (UID: "46518ae6-7502-4276-8b86-58e85eff4951"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.946210 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6360ba89-8432-49d8-b5ea-97a52784ea66-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6360ba89-8432-49d8-b5ea-97a52784ea66" (UID: "6360ba89-8432-49d8-b5ea-97a52784ea66"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.952836 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46518ae6-7502-4276-8b86-58e85eff4951-kube-api-access-89db7" (OuterVolumeSpecName: "kube-api-access-89db7") pod "46518ae6-7502-4276-8b86-58e85eff4951" (UID: "46518ae6-7502-4276-8b86-58e85eff4951"). InnerVolumeSpecName "kube-api-access-89db7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:49 crc kubenswrapper[4774]: I1121 14:25:49.959690 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6360ba89-8432-49d8-b5ea-97a52784ea66-kube-api-access-rfznz" (OuterVolumeSpecName: "kube-api-access-rfznz") pod "6360ba89-8432-49d8-b5ea-97a52784ea66" (UID: "6360ba89-8432-49d8-b5ea-97a52784ea66"). InnerVolumeSpecName "kube-api-access-rfznz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.047239 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfznz\" (UniqueName: \"kubernetes.io/projected/6360ba89-8432-49d8-b5ea-97a52784ea66-kube-api-access-rfznz\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.047289 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46518ae6-7502-4276-8b86-58e85eff4951-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.047300 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6360ba89-8432-49d8-b5ea-97a52784ea66-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.047309 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89db7\" (UniqueName: \"kubernetes.io/projected/46518ae6-7502-4276-8b86-58e85eff4951-kube-api-access-89db7\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.370180 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-7vg67" event={"ID":"46518ae6-7502-4276-8b86-58e85eff4951","Type":"ContainerDied","Data":"678083493eaa23d6a7f2b80d00e187773e3ac91c7c2b4608cf09b8d158274b05"} Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.370581 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="678083493eaa23d6a7f2b80d00e187773e3ac91c7c2b4608cf09b8d158274b05" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.370265 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-7vg67" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.372680 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-d6wf2" event={"ID":"6360ba89-8432-49d8-b5ea-97a52784ea66","Type":"ContainerDied","Data":"9911c20f90a9db93914fe0e510a3671aff337bf18aba9ac9ecc7284b31e6a35e"} Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.372735 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9911c20f90a9db93914fe0e510a3671aff337bf18aba9ac9ecc7284b31e6a35e" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.372797 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-d6wf2" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.380290 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38","Type":"ContainerStarted","Data":"e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068"} Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.810714 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-3102-account-create-g4bl5" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.863841 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8nj5\" (UniqueName: \"kubernetes.io/projected/2969283f-bdf3-4a7c-88c3-04e0b009a6b9-kube-api-access-t8nj5\") pod \"2969283f-bdf3-4a7c-88c3-04e0b009a6b9\" (UID: \"2969283f-bdf3-4a7c-88c3-04e0b009a6b9\") " Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.863921 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2969283f-bdf3-4a7c-88c3-04e0b009a6b9-operator-scripts\") pod \"2969283f-bdf3-4a7c-88c3-04e0b009a6b9\" (UID: \"2969283f-bdf3-4a7c-88c3-04e0b009a6b9\") " Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.865556 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2969283f-bdf3-4a7c-88c3-04e0b009a6b9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2969283f-bdf3-4a7c-88c3-04e0b009a6b9" (UID: "2969283f-bdf3-4a7c-88c3-04e0b009a6b9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.873954 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2969283f-bdf3-4a7c-88c3-04e0b009a6b9-kube-api-access-t8nj5" (OuterVolumeSpecName: "kube-api-access-t8nj5") pod "2969283f-bdf3-4a7c-88c3-04e0b009a6b9" (UID: "2969283f-bdf3-4a7c-88c3-04e0b009a6b9"). InnerVolumeSpecName "kube-api-access-t8nj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.967867 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8nj5\" (UniqueName: \"kubernetes.io/projected/2969283f-bdf3-4a7c-88c3-04e0b009a6b9-kube-api-access-t8nj5\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:50 crc kubenswrapper[4774]: I1121 14:25:50.967932 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2969283f-bdf3-4a7c-88c3-04e0b009a6b9-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:50.999437 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-242e-account-create-84s8b" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.017341 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-a40a-account-create-bmj2k" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.049420 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-zgkzk" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.069767 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3-operator-scripts\") pod \"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3\" (UID: \"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3\") " Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.069982 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wj7c8\" (UniqueName: \"kubernetes.io/projected/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3-kube-api-access-wj7c8\") pod \"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3\" (UID: \"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3\") " Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.070062 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6513cfab-0b30-4103-8e71-3492d2013657-operator-scripts\") pod \"6513cfab-0b30-4103-8e71-3492d2013657\" (UID: \"6513cfab-0b30-4103-8e71-3492d2013657\") " Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.070152 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xvq8\" (UniqueName: \"kubernetes.io/projected/6513cfab-0b30-4103-8e71-3492d2013657-kube-api-access-2xvq8\") pod \"6513cfab-0b30-4103-8e71-3492d2013657\" (UID: \"6513cfab-0b30-4103-8e71-3492d2013657\") " Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.078683 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6513cfab-0b30-4103-8e71-3492d2013657-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6513cfab-0b30-4103-8e71-3492d2013657" (UID: "6513cfab-0b30-4103-8e71-3492d2013657"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.079385 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3" (UID: "7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.092198 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3-kube-api-access-wj7c8" (OuterVolumeSpecName: "kube-api-access-wj7c8") pod "7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3" (UID: "7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3"). InnerVolumeSpecName "kube-api-access-wj7c8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.093593 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6513cfab-0b30-4103-8e71-3492d2013657-kube-api-access-2xvq8" (OuterVolumeSpecName: "kube-api-access-2xvq8") pod "6513cfab-0b30-4103-8e71-3492d2013657" (UID: "6513cfab-0b30-4103-8e71-3492d2013657"). InnerVolumeSpecName "kube-api-access-2xvq8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.171928 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f30db86a-e756-4f9a-9691-1642d9678687-operator-scripts\") pod \"f30db86a-e756-4f9a-9691-1642d9678687\" (UID: \"f30db86a-e756-4f9a-9691-1642d9678687\") " Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.172172 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n55rc\" (UniqueName: \"kubernetes.io/projected/f30db86a-e756-4f9a-9691-1642d9678687-kube-api-access-n55rc\") pod \"f30db86a-e756-4f9a-9691-1642d9678687\" (UID: \"f30db86a-e756-4f9a-9691-1642d9678687\") " Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.176229 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.176275 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wj7c8\" (UniqueName: \"kubernetes.io/projected/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3-kube-api-access-wj7c8\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.176290 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6513cfab-0b30-4103-8e71-3492d2013657-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.176301 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xvq8\" (UniqueName: \"kubernetes.io/projected/6513cfab-0b30-4103-8e71-3492d2013657-kube-api-access-2xvq8\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.178112 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f30db86a-e756-4f9a-9691-1642d9678687-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f30db86a-e756-4f9a-9691-1642d9678687" (UID: "f30db86a-e756-4f9a-9691-1642d9678687"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.179907 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f30db86a-e756-4f9a-9691-1642d9678687-kube-api-access-n55rc" (OuterVolumeSpecName: "kube-api-access-n55rc") pod "f30db86a-e756-4f9a-9691-1642d9678687" (UID: "f30db86a-e756-4f9a-9691-1642d9678687"). InnerVolumeSpecName "kube-api-access-n55rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.278262 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f30db86a-e756-4f9a-9691-1642d9678687-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.278304 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n55rc\" (UniqueName: \"kubernetes.io/projected/f30db86a-e756-4f9a-9691-1642d9678687-kube-api-access-n55rc\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.428765 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38","Type":"ContainerStarted","Data":"18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae"} Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.428922 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="proxy-httpd" containerID="cri-o://18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae" gracePeriod=30 Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.428992 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.428922 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="ceilometer-central-agent" containerID="cri-o://88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b" gracePeriod=30 Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.429106 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="ceilometer-notification-agent" containerID="cri-o://5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae" gracePeriod=30 Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.429174 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="sg-core" containerID="cri-o://e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068" gracePeriod=30 Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.443539 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-3102-account-create-g4bl5" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.443539 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-3102-account-create-g4bl5" event={"ID":"2969283f-bdf3-4a7c-88c3-04e0b009a6b9","Type":"ContainerDied","Data":"3bf6945b35d8f78da60cf69f7a0157d2b82448c06db3b249d802fed615e4f81b"} Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.443974 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bf6945b35d8f78da60cf69f7a0157d2b82448c06db3b249d802fed615e4f81b" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.450698 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-zgkzk" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.450734 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-zgkzk" event={"ID":"f30db86a-e756-4f9a-9691-1642d9678687","Type":"ContainerDied","Data":"1da85c342e2c6fcb2d65a23a9152d1f160fb513b6a5b9ba5e8f91c49ffe8cfbc"} Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.450868 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1da85c342e2c6fcb2d65a23a9152d1f160fb513b6a5b9ba5e8f91c49ffe8cfbc" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.455744 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.8324997600000001 podStartE2EDuration="5.455719123s" podCreationTimestamp="2025-11-21 14:25:46 +0000 UTC" firstStartedPulling="2025-11-21 14:25:47.301012552 +0000 UTC m=+1337.953211811" lastFinishedPulling="2025-11-21 14:25:50.924231915 +0000 UTC m=+1341.576431174" observedRunningTime="2025-11-21 14:25:51.453274552 +0000 UTC m=+1342.105473811" watchObservedRunningTime="2025-11-21 14:25:51.455719123 +0000 UTC m=+1342.107918382" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.461554 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-a40a-account-create-bmj2k" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.462380 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-a40a-account-create-bmj2k" event={"ID":"7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3","Type":"ContainerDied","Data":"1765fe352e317d84679f8901993a46390d1eef35b35ed6bcde17c3c97d931fd1"} Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.462425 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1765fe352e317d84679f8901993a46390d1eef35b35ed6bcde17c3c97d931fd1" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.468127 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-242e-account-create-84s8b" event={"ID":"6513cfab-0b30-4103-8e71-3492d2013657","Type":"ContainerDied","Data":"f59a923432f5d0b2094e9c3320d7992eaf763d2df635c0038f28f4fbadc3a81f"} Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.468177 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f59a923432f5d0b2094e9c3320d7992eaf763d2df635c0038f28f4fbadc3a81f" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.468283 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-242e-account-create-84s8b" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.591213 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.591559 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.642731 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 21 14:25:51 crc kubenswrapper[4774]: I1121 14:25:51.647595 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.272353 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9vfmw"] Nov 21 14:25:52 crc kubenswrapper[4774]: E1121 14:25:52.272930 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f30db86a-e756-4f9a-9691-1642d9678687" containerName="mariadb-database-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.272958 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f30db86a-e756-4f9a-9691-1642d9678687" containerName="mariadb-database-create" Nov 21 14:25:52 crc kubenswrapper[4774]: E1121 14:25:52.272980 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6360ba89-8432-49d8-b5ea-97a52784ea66" containerName="mariadb-database-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.272990 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6360ba89-8432-49d8-b5ea-97a52784ea66" containerName="mariadb-database-create" Nov 21 14:25:52 crc kubenswrapper[4774]: E1121 14:25:52.273007 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2969283f-bdf3-4a7c-88c3-04e0b009a6b9" containerName="mariadb-account-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.273017 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2969283f-bdf3-4a7c-88c3-04e0b009a6b9" containerName="mariadb-account-create" Nov 21 14:25:52 crc kubenswrapper[4774]: E1121 14:25:52.273049 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46518ae6-7502-4276-8b86-58e85eff4951" containerName="mariadb-database-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.273057 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="46518ae6-7502-4276-8b86-58e85eff4951" containerName="mariadb-database-create" Nov 21 14:25:52 crc kubenswrapper[4774]: E1121 14:25:52.273091 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3" containerName="mariadb-account-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.273195 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3" containerName="mariadb-account-create" Nov 21 14:25:52 crc kubenswrapper[4774]: E1121 14:25:52.273239 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6513cfab-0b30-4103-8e71-3492d2013657" containerName="mariadb-account-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.273254 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6513cfab-0b30-4103-8e71-3492d2013657" containerName="mariadb-account-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.273588 4774 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="f30db86a-e756-4f9a-9691-1642d9678687" containerName="mariadb-database-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.273620 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3" containerName="mariadb-account-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.273640 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6360ba89-8432-49d8-b5ea-97a52784ea66" containerName="mariadb-database-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.273655 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6513cfab-0b30-4103-8e71-3492d2013657" containerName="mariadb-account-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.273675 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="2969283f-bdf3-4a7c-88c3-04e0b009a6b9" containerName="mariadb-account-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.273698 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="46518ae6-7502-4276-8b86-58e85eff4951" containerName="mariadb-database-create" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.274792 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.279695 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-lh6tx" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.279756 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.280228 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.287551 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9vfmw"] Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.326634 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n42bz\" (UniqueName: \"kubernetes.io/projected/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-kube-api-access-n42bz\") pod \"nova-cell0-conductor-db-sync-9vfmw\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.326866 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-scripts\") pod \"nova-cell0-conductor-db-sync-9vfmw\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.327032 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-9vfmw\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.327100 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-config-data\") pod \"nova-cell0-conductor-db-sync-9vfmw\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.429720 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-9vfmw\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.429792 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-config-data\") pod \"nova-cell0-conductor-db-sync-9vfmw\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.429929 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n42bz\" (UniqueName: \"kubernetes.io/projected/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-kube-api-access-n42bz\") pod \"nova-cell0-conductor-db-sync-9vfmw\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.430019 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-scripts\") pod \"nova-cell0-conductor-db-sync-9vfmw\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.436681 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-9vfmw\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.437347 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-config-data\") pod \"nova-cell0-conductor-db-sync-9vfmw\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.453482 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-scripts\") pod \"nova-cell0-conductor-db-sync-9vfmw\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.453602 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n42bz\" (UniqueName: \"kubernetes.io/projected/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-kube-api-access-n42bz\") pod \"nova-cell0-conductor-db-sync-9vfmw\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.509691 4774 generic.go:334] "Generic (PLEG): container finished" podID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" 
containerID="18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae" exitCode=0 Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.509757 4774 generic.go:334] "Generic (PLEG): container finished" podID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerID="e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068" exitCode=2 Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.509764 4774 generic.go:334] "Generic (PLEG): container finished" podID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerID="5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae" exitCode=0 Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.509778 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38","Type":"ContainerDied","Data":"18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae"} Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.509933 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38","Type":"ContainerDied","Data":"e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068"} Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.509949 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38","Type":"ContainerDied","Data":"5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae"} Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.511831 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.511918 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 21 14:25:52 crc kubenswrapper[4774]: I1121 14:25:52.595799 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:25:53 crc kubenswrapper[4774]: I1121 14:25:53.098952 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9vfmw"] Nov 21 14:25:53 crc kubenswrapper[4774]: W1121 14:25:53.100458 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb13000ea_9b2c_47fe_aa5f_3e1de9f83511.slice/crio-48a223b454d982b7e6cf0254c0f53c4465d581b446a4461f1e07a4c02e465419 WatchSource:0}: Error finding container 48a223b454d982b7e6cf0254c0f53c4465d581b446a4461f1e07a4c02e465419: Status 404 returned error can't find the container with id 48a223b454d982b7e6cf0254c0f53c4465d581b446a4461f1e07a4c02e465419 Nov 21 14:25:53 crc kubenswrapper[4774]: I1121 14:25:53.526004 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-9vfmw" event={"ID":"b13000ea-9b2c-47fe-aa5f-3e1de9f83511","Type":"ContainerStarted","Data":"48a223b454d982b7e6cf0254c0f53c4465d581b446a4461f1e07a4c02e465419"} Nov 21 14:25:53 crc kubenswrapper[4774]: I1121 14:25:53.699412 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:53 crc kubenswrapper[4774]: I1121 14:25:53.699979 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:53 crc kubenswrapper[4774]: I1121 14:25:53.752580 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:53 crc kubenswrapper[4774]: I1121 14:25:53.772777 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:54 crc kubenswrapper[4774]: I1121 14:25:54.535075 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 21 14:25:54 crc kubenswrapper[4774]: I1121 14:25:54.535116 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 21 14:25:54 crc kubenswrapper[4774]: I1121 14:25:54.535448 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:54 crc kubenswrapper[4774]: I1121 14:25:54.535492 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:54 crc kubenswrapper[4774]: I1121 14:25:54.637013 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 21 14:25:54 crc kubenswrapper[4774]: I1121 14:25:54.639481 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.250242 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.409567 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-run-httpd\") pod \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.409676 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-log-httpd\") pod \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.409778 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-scripts\") pod \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.409987 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r92hc\" (UniqueName: \"kubernetes.io/projected/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-kube-api-access-r92hc\") pod \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.410060 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-config-data\") pod \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.410176 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" (UID: "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.410187 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-sg-core-conf-yaml\") pod \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.410289 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-combined-ca-bundle\") pod \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\" (UID: \"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38\") " Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.410324 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" (UID: "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.410854 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.411228 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.433420 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-scripts" (OuterVolumeSpecName: "scripts") pod "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" (UID: "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.433473 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-kube-api-access-r92hc" (OuterVolumeSpecName: "kube-api-access-r92hc") pod "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" (UID: "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38"). InnerVolumeSpecName "kube-api-access-r92hc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.442987 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" (UID: "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.505348 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" (UID: "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.513772 4774 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.514138 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.514153 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.514163 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r92hc\" (UniqueName: \"kubernetes.io/projected/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-kube-api-access-r92hc\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.524636 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-config-data" (OuterVolumeSpecName: "config-data") pod "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" (UID: "03cb0172-9ef5-4318-b9d8-c8c3b3a82c38"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.561697 4774 generic.go:334] "Generic (PLEG): container finished" podID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerID="88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b" exitCode=0 Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.561781 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38","Type":"ContainerDied","Data":"88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b"} Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.561912 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"03cb0172-9ef5-4318-b9d8-c8c3b3a82c38","Type":"ContainerDied","Data":"586f5a5ae5732bbbc3b6e1b5371a370cbda75b7714403e2793d1d093e118d0b7"} Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.561957 4774 scope.go:117] "RemoveContainer" containerID="18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.563477 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.591160 4774 scope.go:117] "RemoveContainer" containerID="e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.610594 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.628795 4774 scope.go:117] "RemoveContainer" containerID="5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.629291 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.641477 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.672165 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:55 crc kubenswrapper[4774]: E1121 14:25:55.673496 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="sg-core" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.673520 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="sg-core" Nov 21 14:25:55 crc kubenswrapper[4774]: E1121 14:25:55.673551 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="proxy-httpd" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.673557 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="proxy-httpd" Nov 21 14:25:55 crc kubenswrapper[4774]: E1121 14:25:55.673569 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="ceilometer-central-agent" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.673576 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="ceilometer-central-agent" Nov 21 14:25:55 crc kubenswrapper[4774]: E1121 14:25:55.673604 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="ceilometer-notification-agent" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.673610 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="ceilometer-notification-agent" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.673775 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="sg-core" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.673786 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="proxy-httpd" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.673804 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="ceilometer-notification-agent" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.673844 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" containerName="ceilometer-central-agent" Nov 21 
14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.676030 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.684447 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.684500 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.686095 4774 scope.go:117] "RemoveContainer" containerID="88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.686444 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.738358 4774 scope.go:117] "RemoveContainer" containerID="18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae" Nov 21 14:25:55 crc kubenswrapper[4774]: E1121 14:25:55.739015 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae\": container with ID starting with 18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae not found: ID does not exist" containerID="18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.739066 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae"} err="failed to get container status \"18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae\": rpc error: code = NotFound desc = could not find container \"18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae\": container with ID starting with 18e674e747455da20436465be36b53aab056e6ab8176baaf9897e01eda49deae not found: ID does not exist" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.739193 4774 scope.go:117] "RemoveContainer" containerID="e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068" Nov 21 14:25:55 crc kubenswrapper[4774]: E1121 14:25:55.743349 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068\": container with ID starting with e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068 not found: ID does not exist" containerID="e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.743383 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068"} err="failed to get container status \"e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068\": rpc error: code = NotFound desc = could not find container \"e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068\": container with ID starting with e4b640871fde3eb58ffdfe1562ebf71a7a3c20e8d26b1d3dba0cf16bbc2f6068 not found: ID does not exist" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.743404 4774 scope.go:117] "RemoveContainer" containerID="5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae" Nov 21 14:25:55 crc kubenswrapper[4774]: E1121 14:25:55.747504 4774 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae\": container with ID starting with 5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae not found: ID does not exist" containerID="5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.747584 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae"} err="failed to get container status \"5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae\": rpc error: code = NotFound desc = could not find container \"5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae\": container with ID starting with 5c31a5255a4d635428aeb73fa341700980291b716d21c31ecccf3ba26bb040ae not found: ID does not exist" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.747636 4774 scope.go:117] "RemoveContainer" containerID="88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b" Nov 21 14:25:55 crc kubenswrapper[4774]: E1121 14:25:55.751459 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b\": container with ID starting with 88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b not found: ID does not exist" containerID="88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.751590 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b"} err="failed to get container status \"88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b\": rpc error: code = NotFound desc = could not find container \"88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b\": container with ID starting with 88b237d6f1ee03ba01f238f6b81d1d48d18a7327777dec4f11b148ccf1e7276b not found: ID does not exist" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.833838 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-config-data\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.833931 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.833968 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5mxq\" (UniqueName: \"kubernetes.io/projected/5c9810b1-2950-4d27-8b3c-4695c781ec2f-kube-api-access-j5mxq\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.834000 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/5c9810b1-2950-4d27-8b3c-4695c781ec2f-log-httpd\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.834024 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.834156 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c9810b1-2950-4d27-8b3c-4695c781ec2f-run-httpd\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.834198 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-scripts\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.938286 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-scripts\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.939325 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-config-data\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.939498 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.939541 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5mxq\" (UniqueName: \"kubernetes.io/projected/5c9810b1-2950-4d27-8b3c-4695c781ec2f-kube-api-access-j5mxq\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.939577 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c9810b1-2950-4d27-8b3c-4695c781ec2f-log-httpd\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.939605 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.939737 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c9810b1-2950-4d27-8b3c-4695c781ec2f-run-httpd\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.940443 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c9810b1-2950-4d27-8b3c-4695c781ec2f-log-httpd\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.941146 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c9810b1-2950-4d27-8b3c-4695c781ec2f-run-httpd\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.945474 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-config-data\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.945507 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.946146 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-scripts\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.957889 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:55 crc kubenswrapper[4774]: I1121 14:25:55.965646 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5mxq\" (UniqueName: \"kubernetes.io/projected/5c9810b1-2950-4d27-8b3c-4695c781ec2f-kube-api-access-j5mxq\") pod \"ceilometer-0\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " pod="openstack/ceilometer-0" Nov 21 14:25:56 crc kubenswrapper[4774]: I1121 14:25:56.012427 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:25:56 crc kubenswrapper[4774]: I1121 14:25:56.120411 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03cb0172-9ef5-4318-b9d8-c8c3b3a82c38" path="/var/lib/kubelet/pods/03cb0172-9ef5-4318-b9d8-c8c3b3a82c38/volumes" Nov 21 14:25:56 crc kubenswrapper[4774]: I1121 14:25:56.545064 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:56 crc kubenswrapper[4774]: W1121 14:25:56.564286 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c9810b1_2950_4d27_8b3c_4695c781ec2f.slice/crio-5e8948165443783d7fcd0a61f383d5458f842a666bb4a280ce58707c8ba3b571 WatchSource:0}: Error finding container 5e8948165443783d7fcd0a61f383d5458f842a666bb4a280ce58707c8ba3b571: Status 404 returned error can't find the container with id 5e8948165443783d7fcd0a61f383d5458f842a666bb4a280ce58707c8ba3b571 Nov 21 14:25:56 crc kubenswrapper[4774]: I1121 14:25:56.829739 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:56 crc kubenswrapper[4774]: I1121 14:25:56.830266 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 21 14:25:57 crc kubenswrapper[4774]: I1121 14:25:57.432803 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 21 14:25:57 crc kubenswrapper[4774]: I1121 14:25:57.597737 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c9810b1-2950-4d27-8b3c-4695c781ec2f","Type":"ContainerStarted","Data":"5e8948165443783d7fcd0a61f383d5458f842a666bb4a280ce58707c8ba3b571"} Nov 21 14:25:57 crc kubenswrapper[4774]: I1121 14:25:57.832912 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:25:59 crc kubenswrapper[4774]: I1121 14:25:59.734685 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:25:59 crc kubenswrapper[4774]: I1121 14:25:59.735025 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:26:03 crc kubenswrapper[4774]: I1121 14:26:03.801478 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c9810b1-2950-4d27-8b3c-4695c781ec2f","Type":"ContainerStarted","Data":"3646c7a7b4a59cf8faac055a3a99c9396759432b2d9171cb5209cf7de7fe78dd"} Nov 21 14:26:03 crc kubenswrapper[4774]: I1121 14:26:03.804954 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c9810b1-2950-4d27-8b3c-4695c781ec2f","Type":"ContainerStarted","Data":"b88ce6f8a7f037166486313520b9aca5b724dc5efbb91d298475e4a809ef1711"} Nov 21 14:26:03 crc kubenswrapper[4774]: I1121 14:26:03.805244 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-9vfmw" 
event={"ID":"b13000ea-9b2c-47fe-aa5f-3e1de9f83511","Type":"ContainerStarted","Data":"f1f0758e5401f409b81f484636dad8ae36a7270239b242629d5b05458d87db34"} Nov 21 14:26:03 crc kubenswrapper[4774]: I1121 14:26:03.838657 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-9vfmw" podStartSLOduration=2.114675983 podStartE2EDuration="11.838634156s" podCreationTimestamp="2025-11-21 14:25:52 +0000 UTC" firstStartedPulling="2025-11-21 14:25:53.103413834 +0000 UTC m=+1343.755613093" lastFinishedPulling="2025-11-21 14:26:02.827372007 +0000 UTC m=+1353.479571266" observedRunningTime="2025-11-21 14:26:03.832006084 +0000 UTC m=+1354.484205333" watchObservedRunningTime="2025-11-21 14:26:03.838634156 +0000 UTC m=+1354.490833415" Nov 21 14:26:04 crc kubenswrapper[4774]: I1121 14:26:04.824767 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c9810b1-2950-4d27-8b3c-4695c781ec2f","Type":"ContainerStarted","Data":"0df29cb95e2245ee053929e0352d608565c2a13d20c124b61ba0b5742861a184"} Nov 21 14:26:06 crc kubenswrapper[4774]: I1121 14:26:06.848182 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c9810b1-2950-4d27-8b3c-4695c781ec2f","Type":"ContainerStarted","Data":"7af956b593695d0dd59156e52a861a34cc5955e763a5ea2bc6e848a7f8cf9bf6"} Nov 21 14:26:06 crc kubenswrapper[4774]: I1121 14:26:06.848754 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="ceilometer-central-agent" containerID="cri-o://b88ce6f8a7f037166486313520b9aca5b724dc5efbb91d298475e4a809ef1711" gracePeriod=30 Nov 21 14:26:06 crc kubenswrapper[4774]: I1121 14:26:06.848798 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="proxy-httpd" containerID="cri-o://7af956b593695d0dd59156e52a861a34cc5955e763a5ea2bc6e848a7f8cf9bf6" gracePeriod=30 Nov 21 14:26:06 crc kubenswrapper[4774]: I1121 14:26:06.848924 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="ceilometer-notification-agent" containerID="cri-o://3646c7a7b4a59cf8faac055a3a99c9396759432b2d9171cb5209cf7de7fe78dd" gracePeriod=30 Nov 21 14:26:06 crc kubenswrapper[4774]: I1121 14:26:06.848810 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="sg-core" containerID="cri-o://0df29cb95e2245ee053929e0352d608565c2a13d20c124b61ba0b5742861a184" gracePeriod=30 Nov 21 14:26:06 crc kubenswrapper[4774]: I1121 14:26:06.849101 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 21 14:26:06 crc kubenswrapper[4774]: I1121 14:26:06.879270 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.035404164 podStartE2EDuration="11.879240933s" podCreationTimestamp="2025-11-21 14:25:55 +0000 UTC" firstStartedPulling="2025-11-21 14:25:56.574200039 +0000 UTC m=+1347.226399288" lastFinishedPulling="2025-11-21 14:26:06.418036798 +0000 UTC m=+1357.070236057" observedRunningTime="2025-11-21 14:26:06.872536029 +0000 UTC m=+1357.524735328" watchObservedRunningTime="2025-11-21 14:26:06.879240933 +0000 UTC m=+1357.531440192" 
Nov 21 14:26:07 crc kubenswrapper[4774]: I1121 14:26:07.882542 4774 generic.go:334] "Generic (PLEG): container finished" podID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerID="7af956b593695d0dd59156e52a861a34cc5955e763a5ea2bc6e848a7f8cf9bf6" exitCode=0 Nov 21 14:26:07 crc kubenswrapper[4774]: I1121 14:26:07.883015 4774 generic.go:334] "Generic (PLEG): container finished" podID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerID="0df29cb95e2245ee053929e0352d608565c2a13d20c124b61ba0b5742861a184" exitCode=2 Nov 21 14:26:07 crc kubenswrapper[4774]: I1121 14:26:07.883055 4774 generic.go:334] "Generic (PLEG): container finished" podID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerID="3646c7a7b4a59cf8faac055a3a99c9396759432b2d9171cb5209cf7de7fe78dd" exitCode=0 Nov 21 14:26:07 crc kubenswrapper[4774]: I1121 14:26:07.883083 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c9810b1-2950-4d27-8b3c-4695c781ec2f","Type":"ContainerDied","Data":"7af956b593695d0dd59156e52a861a34cc5955e763a5ea2bc6e848a7f8cf9bf6"} Nov 21 14:26:07 crc kubenswrapper[4774]: I1121 14:26:07.883140 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c9810b1-2950-4d27-8b3c-4695c781ec2f","Type":"ContainerDied","Data":"0df29cb95e2245ee053929e0352d608565c2a13d20c124b61ba0b5742861a184"} Nov 21 14:26:07 crc kubenswrapper[4774]: I1121 14:26:07.883151 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c9810b1-2950-4d27-8b3c-4695c781ec2f","Type":"ContainerDied","Data":"3646c7a7b4a59cf8faac055a3a99c9396759432b2d9171cb5209cf7de7fe78dd"} Nov 21 14:26:11 crc kubenswrapper[4774]: I1121 14:26:11.933232 4774 generic.go:334] "Generic (PLEG): container finished" podID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerID="b88ce6f8a7f037166486313520b9aca5b724dc5efbb91d298475e4a809ef1711" exitCode=0 Nov 21 14:26:11 crc kubenswrapper[4774]: I1121 14:26:11.933338 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c9810b1-2950-4d27-8b3c-4695c781ec2f","Type":"ContainerDied","Data":"b88ce6f8a7f037166486313520b9aca5b724dc5efbb91d298475e4a809ef1711"} Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.024460 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.126235 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-config-data\") pod \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.126320 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c9810b1-2950-4d27-8b3c-4695c781ec2f-log-httpd\") pod \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.126357 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-sg-core-conf-yaml\") pod \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.126418 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5mxq\" (UniqueName: \"kubernetes.io/projected/5c9810b1-2950-4d27-8b3c-4695c781ec2f-kube-api-access-j5mxq\") pod \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.127188 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c9810b1-2950-4d27-8b3c-4695c781ec2f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5c9810b1-2950-4d27-8b3c-4695c781ec2f" (UID: "5c9810b1-2950-4d27-8b3c-4695c781ec2f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.127464 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-combined-ca-bundle\") pod \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.128110 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-scripts\") pod \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.128411 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c9810b1-2950-4d27-8b3c-4695c781ec2f-run-httpd\") pod \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\" (UID: \"5c9810b1-2950-4d27-8b3c-4695c781ec2f\") " Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.129530 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c9810b1-2950-4d27-8b3c-4695c781ec2f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5c9810b1-2950-4d27-8b3c-4695c781ec2f" (UID: "5c9810b1-2950-4d27-8b3c-4695c781ec2f"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.130535 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c9810b1-2950-4d27-8b3c-4695c781ec2f-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.130611 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c9810b1-2950-4d27-8b3c-4695c781ec2f-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.132407 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c9810b1-2950-4d27-8b3c-4695c781ec2f-kube-api-access-j5mxq" (OuterVolumeSpecName: "kube-api-access-j5mxq") pod "5c9810b1-2950-4d27-8b3c-4695c781ec2f" (UID: "5c9810b1-2950-4d27-8b3c-4695c781ec2f"). InnerVolumeSpecName "kube-api-access-j5mxq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.135157 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-scripts" (OuterVolumeSpecName: "scripts") pod "5c9810b1-2950-4d27-8b3c-4695c781ec2f" (UID: "5c9810b1-2950-4d27-8b3c-4695c781ec2f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.166008 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5c9810b1-2950-4d27-8b3c-4695c781ec2f" (UID: "5c9810b1-2950-4d27-8b3c-4695c781ec2f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.233865 4774 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.233911 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5mxq\" (UniqueName: \"kubernetes.io/projected/5c9810b1-2950-4d27-8b3c-4695c781ec2f-kube-api-access-j5mxq\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.233930 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.261085 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c9810b1-2950-4d27-8b3c-4695c781ec2f" (UID: "5c9810b1-2950-4d27-8b3c-4695c781ec2f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.289613 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-config-data" (OuterVolumeSpecName: "config-data") pod "5c9810b1-2950-4d27-8b3c-4695c781ec2f" (UID: "5c9810b1-2950-4d27-8b3c-4695c781ec2f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.336734 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.336919 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c9810b1-2950-4d27-8b3c-4695c781ec2f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.947214 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5c9810b1-2950-4d27-8b3c-4695c781ec2f","Type":"ContainerDied","Data":"5e8948165443783d7fcd0a61f383d5458f842a666bb4a280ce58707c8ba3b571"} Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.947278 4774 scope.go:117] "RemoveContainer" containerID="7af956b593695d0dd59156e52a861a34cc5955e763a5ea2bc6e848a7f8cf9bf6" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.947443 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.989513 4774 scope.go:117] "RemoveContainer" containerID="0df29cb95e2245ee053929e0352d608565c2a13d20c124b61ba0b5742861a184" Nov 21 14:26:12 crc kubenswrapper[4774]: I1121 14:26:12.990748 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.000394 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.016183 4774 scope.go:117] "RemoveContainer" containerID="3646c7a7b4a59cf8faac055a3a99c9396759432b2d9171cb5209cf7de7fe78dd" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.028870 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:26:13 crc kubenswrapper[4774]: E1121 14:26:13.029466 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="ceilometer-notification-agent" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.029494 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="ceilometer-notification-agent" Nov 21 14:26:13 crc kubenswrapper[4774]: E1121 14:26:13.029543 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="sg-core" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.029551 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="sg-core" Nov 21 14:26:13 crc kubenswrapper[4774]: E1121 14:26:13.029578 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="ceilometer-central-agent" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.029593 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="ceilometer-central-agent" Nov 21 14:26:13 crc kubenswrapper[4774]: E1121 14:26:13.029611 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="proxy-httpd" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.029620 4774 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="proxy-httpd" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.030051 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="ceilometer-notification-agent" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.030097 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="sg-core" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.030109 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="proxy-httpd" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.030118 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" containerName="ceilometer-central-agent" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.032733 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.036559 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.037078 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.039150 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.048631 4774 scope.go:117] "RemoveContainer" containerID="b88ce6f8a7f037166486313520b9aca5b724dc5efbb91d298475e4a809ef1711" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.152034 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.152098 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9a4ee847-f087-4d23-a078-96245826a60c-run-httpd\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.152124 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9a4ee847-f087-4d23-a078-96245826a60c-log-httpd\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.152204 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-scripts\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.152240 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.152273 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-config-data\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.152307 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8hqq\" (UniqueName: \"kubernetes.io/projected/9a4ee847-f087-4d23-a078-96245826a60c-kube-api-access-f8hqq\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.254406 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.254532 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9a4ee847-f087-4d23-a078-96245826a60c-log-httpd\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.254558 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9a4ee847-f087-4d23-a078-96245826a60c-run-httpd\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.255203 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9a4ee847-f087-4d23-a078-96245826a60c-log-httpd\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.255259 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9a4ee847-f087-4d23-a078-96245826a60c-run-httpd\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.255456 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-scripts\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.255521 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.255620 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-config-data\") pod \"ceilometer-0\" (UID: 
\"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.255665 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8hqq\" (UniqueName: \"kubernetes.io/projected/9a4ee847-f087-4d23-a078-96245826a60c-kube-api-access-f8hqq\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.260960 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-scripts\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.261530 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-config-data\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.261549 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.275906 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.288619 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8hqq\" (UniqueName: \"kubernetes.io/projected/9a4ee847-f087-4d23-a078-96245826a60c-kube-api-access-f8hqq\") pod \"ceilometer-0\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.364239 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.826671 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:26:13 crc kubenswrapper[4774]: I1121 14:26:13.961985 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9a4ee847-f087-4d23-a078-96245826a60c","Type":"ContainerStarted","Data":"abcd43c9b157b0420d30f51972a838b3fa7f1dee61c60cc2d23031d1c94724f2"} Nov 21 14:26:14 crc kubenswrapper[4774]: I1121 14:26:14.106468 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c9810b1-2950-4d27-8b3c-4695c781ec2f" path="/var/lib/kubelet/pods/5c9810b1-2950-4d27-8b3c-4695c781ec2f/volumes" Nov 21 14:26:14 crc kubenswrapper[4774]: I1121 14:26:14.987089 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9a4ee847-f087-4d23-a078-96245826a60c","Type":"ContainerStarted","Data":"cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae"} Nov 21 14:26:16 crc kubenswrapper[4774]: I1121 14:26:16.001021 4774 generic.go:334] "Generic (PLEG): container finished" podID="b13000ea-9b2c-47fe-aa5f-3e1de9f83511" containerID="f1f0758e5401f409b81f484636dad8ae36a7270239b242629d5b05458d87db34" exitCode=0 Nov 21 14:26:16 crc kubenswrapper[4774]: I1121 14:26:16.001075 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-9vfmw" event={"ID":"b13000ea-9b2c-47fe-aa5f-3e1de9f83511","Type":"ContainerDied","Data":"f1f0758e5401f409b81f484636dad8ae36a7270239b242629d5b05458d87db34"} Nov 21 14:26:16 crc kubenswrapper[4774]: I1121 14:26:16.008070 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9a4ee847-f087-4d23-a078-96245826a60c","Type":"ContainerStarted","Data":"d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e"} Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.024174 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9a4ee847-f087-4d23-a078-96245826a60c","Type":"ContainerStarted","Data":"3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06"} Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.433404 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.563934 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-scripts\") pod \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.564139 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n42bz\" (UniqueName: \"kubernetes.io/projected/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-kube-api-access-n42bz\") pod \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.565437 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-config-data\") pod \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.565543 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-combined-ca-bundle\") pod \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\" (UID: \"b13000ea-9b2c-47fe-aa5f-3e1de9f83511\") " Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.571799 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-kube-api-access-n42bz" (OuterVolumeSpecName: "kube-api-access-n42bz") pod "b13000ea-9b2c-47fe-aa5f-3e1de9f83511" (UID: "b13000ea-9b2c-47fe-aa5f-3e1de9f83511"). InnerVolumeSpecName "kube-api-access-n42bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.574056 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-scripts" (OuterVolumeSpecName: "scripts") pod "b13000ea-9b2c-47fe-aa5f-3e1de9f83511" (UID: "b13000ea-9b2c-47fe-aa5f-3e1de9f83511"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.607065 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b13000ea-9b2c-47fe-aa5f-3e1de9f83511" (UID: "b13000ea-9b2c-47fe-aa5f-3e1de9f83511"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.607525 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-config-data" (OuterVolumeSpecName: "config-data") pod "b13000ea-9b2c-47fe-aa5f-3e1de9f83511" (UID: "b13000ea-9b2c-47fe-aa5f-3e1de9f83511"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.668165 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.668629 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.668649 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n42bz\" (UniqueName: \"kubernetes.io/projected/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-kube-api-access-n42bz\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:17 crc kubenswrapper[4774]: I1121 14:26:17.668664 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b13000ea-9b2c-47fe-aa5f-3e1de9f83511-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.036286 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-9vfmw" event={"ID":"b13000ea-9b2c-47fe-aa5f-3e1de9f83511","Type":"ContainerDied","Data":"48a223b454d982b7e6cf0254c0f53c4465d581b446a4461f1e07a4c02e465419"} Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.036340 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48a223b454d982b7e6cf0254c0f53c4465d581b446a4461f1e07a4c02e465419" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.036667 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-9vfmw" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.042440 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9a4ee847-f087-4d23-a078-96245826a60c","Type":"ContainerStarted","Data":"dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6"} Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.043202 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.080166 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.3440353529999998 podStartE2EDuration="6.08013545s" podCreationTimestamp="2025-11-21 14:26:12 +0000 UTC" firstStartedPulling="2025-11-21 14:26:13.835601422 +0000 UTC m=+1364.487800681" lastFinishedPulling="2025-11-21 14:26:17.571701519 +0000 UTC m=+1368.223900778" observedRunningTime="2025-11-21 14:26:18.07045667 +0000 UTC m=+1368.722655939" watchObservedRunningTime="2025-11-21 14:26:18.08013545 +0000 UTC m=+1368.732334709" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.160931 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 14:26:18 crc kubenswrapper[4774]: E1121 14:26:18.161500 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b13000ea-9b2c-47fe-aa5f-3e1de9f83511" containerName="nova-cell0-conductor-db-sync" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.161527 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b13000ea-9b2c-47fe-aa5f-3e1de9f83511" containerName="nova-cell0-conductor-db-sync" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 
14:26:18.161862 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b13000ea-9b2c-47fe-aa5f-3e1de9f83511" containerName="nova-cell0-conductor-db-sync" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.162683 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.169764 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-lh6tx" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.172010 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.178499 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.281505 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h7xw\" (UniqueName: \"kubernetes.io/projected/531a66a5-f4c9-44f1-83a7-a3e4292fef52-kube-api-access-4h7xw\") pod \"nova-cell0-conductor-0\" (UID: \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\") " pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.281707 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/531a66a5-f4c9-44f1-83a7-a3e4292fef52-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\") " pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.281749 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/531a66a5-f4c9-44f1-83a7-a3e4292fef52-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\") " pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.383923 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h7xw\" (UniqueName: \"kubernetes.io/projected/531a66a5-f4c9-44f1-83a7-a3e4292fef52-kube-api-access-4h7xw\") pod \"nova-cell0-conductor-0\" (UID: \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\") " pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.384049 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/531a66a5-f4c9-44f1-83a7-a3e4292fef52-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\") " pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.384082 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/531a66a5-f4c9-44f1-83a7-a3e4292fef52-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\") " pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.390349 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/531a66a5-f4c9-44f1-83a7-a3e4292fef52-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\") " 
pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.394908 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/531a66a5-f4c9-44f1-83a7-a3e4292fef52-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\") " pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.404249 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h7xw\" (UniqueName: \"kubernetes.io/projected/531a66a5-f4c9-44f1-83a7-a3e4292fef52-kube-api-access-4h7xw\") pod \"nova-cell0-conductor-0\" (UID: \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\") " pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.482533 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:18 crc kubenswrapper[4774]: I1121 14:26:18.972420 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 14:26:18 crc kubenswrapper[4774]: W1121 14:26:18.983857 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod531a66a5_f4c9_44f1_83a7_a3e4292fef52.slice/crio-32a8ff46a80cd8b2653390e9bc5dc5bc1ef3f604f02adbd3cea1ddad69a05e2c WatchSource:0}: Error finding container 32a8ff46a80cd8b2653390e9bc5dc5bc1ef3f604f02adbd3cea1ddad69a05e2c: Status 404 returned error can't find the container with id 32a8ff46a80cd8b2653390e9bc5dc5bc1ef3f604f02adbd3cea1ddad69a05e2c Nov 21 14:26:19 crc kubenswrapper[4774]: I1121 14:26:19.054961 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"531a66a5-f4c9-44f1-83a7-a3e4292fef52","Type":"ContainerStarted","Data":"32a8ff46a80cd8b2653390e9bc5dc5bc1ef3f604f02adbd3cea1ddad69a05e2c"} Nov 21 14:26:20 crc kubenswrapper[4774]: I1121 14:26:20.068062 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"531a66a5-f4c9-44f1-83a7-a3e4292fef52","Type":"ContainerStarted","Data":"99052893755b66df9cfd4b4ab5f26b2fc638a33462e504620e030ca232e1aded"} Nov 21 14:26:20 crc kubenswrapper[4774]: I1121 14:26:20.069850 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:20 crc kubenswrapper[4774]: I1121 14:26:20.098561 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.098533181 podStartE2EDuration="2.098533181s" podCreationTimestamp="2025-11-21 14:26:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:26:20.093160436 +0000 UTC m=+1370.745359705" watchObservedRunningTime="2025-11-21 14:26:20.098533181 +0000 UTC m=+1370.750732440" Nov 21 14:26:28 crc kubenswrapper[4774]: I1121 14:26:28.512286 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.073511 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-ggjhq"] Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.074781 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.078389 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.078516 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.103494 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-ggjhq"] Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.115794 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-config-data\") pod \"nova-cell0-cell-mapping-ggjhq\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.117589 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ggjhq\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.117951 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvnx9\" (UniqueName: \"kubernetes.io/projected/d99ed036-dfda-4b15-88b9-cdfed30626b5-kube-api-access-cvnx9\") pod \"nova-cell0-cell-mapping-ggjhq\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.118017 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-scripts\") pod \"nova-cell0-cell-mapping-ggjhq\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.221514 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvnx9\" (UniqueName: \"kubernetes.io/projected/d99ed036-dfda-4b15-88b9-cdfed30626b5-kube-api-access-cvnx9\") pod \"nova-cell0-cell-mapping-ggjhq\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.221632 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-scripts\") pod \"nova-cell0-cell-mapping-ggjhq\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.222042 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-config-data\") pod \"nova-cell0-cell-mapping-ggjhq\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.222218 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ggjhq\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.251908 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ggjhq\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.252128 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-scripts\") pod \"nova-cell0-cell-mapping-ggjhq\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.251943 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-config-data\") pod \"nova-cell0-cell-mapping-ggjhq\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.269474 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvnx9\" (UniqueName: \"kubernetes.io/projected/d99ed036-dfda-4b15-88b9-cdfed30626b5-kube-api-access-cvnx9\") pod \"nova-cell0-cell-mapping-ggjhq\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.329079 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.331762 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.337398 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.363870 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.400455 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.404072 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.409414 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.410439 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.424482 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.525832 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.528180 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/659840ff-0364-4f9c-881d-ab33bbc42c09-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.528254 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szp97\" (UniqueName: \"kubernetes.io/projected/b46021eb-b966-48c2-a988-c050546ce0d6-kube-api-access-szp97\") pod \"nova-cell1-novncproxy-0\" (UID: \"b46021eb-b966-48c2-a988-c050546ce0d6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.528315 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b46021eb-b966-48c2-a988-c050546ce0d6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b46021eb-b966-48c2-a988-c050546ce0d6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.529146 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.529961 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f57lj\" (UniqueName: \"kubernetes.io/projected/659840ff-0364-4f9c-881d-ab33bbc42c09-kube-api-access-f57lj\") pod \"nova-api-0\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.530002 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b46021eb-b966-48c2-a988-c050546ce0d6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b46021eb-b966-48c2-a988-c050546ce0d6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.530038 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/659840ff-0364-4f9c-881d-ab33bbc42c09-config-data\") pod \"nova-api-0\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.530100 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/659840ff-0364-4f9c-881d-ab33bbc42c09-logs\") pod \"nova-api-0\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.535523 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.568869 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.600955 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.601029 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.601093 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.602015 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"50f0abd54c499ac14c722ce78a1be249e3c65fdd8bde5f56a8b5c580514c52ff"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.602079 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://50f0abd54c499ac14c722ce78a1be249e3c65fdd8bde5f56a8b5c580514c52ff" gracePeriod=600 Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.605553 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.607397 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.611351 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.623423 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.663085 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.663620 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08c60551-7470-4a5e-93aa-e2513d0e4614-logs\") pod \"nova-metadata-0\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.664167 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f57lj\" (UniqueName: \"kubernetes.io/projected/659840ff-0364-4f9c-881d-ab33bbc42c09-kube-api-access-f57lj\") pod \"nova-api-0\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.664215 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b46021eb-b966-48c2-a988-c050546ce0d6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b46021eb-b966-48c2-a988-c050546ce0d6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.664242 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvhdd\" (UniqueName: \"kubernetes.io/projected/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-kube-api-access-fvhdd\") pod \"nova-scheduler-0\" (UID: \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.664264 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/659840ff-0364-4f9c-881d-ab33bbc42c09-config-data\") pod \"nova-api-0\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.664292 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/659840ff-0364-4f9c-881d-ab33bbc42c09-logs\") pod \"nova-api-0\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.664342 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08c60551-7470-4a5e-93aa-e2513d0e4614-config-data\") pod \"nova-metadata-0\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.664433 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/659840ff-0364-4f9c-881d-ab33bbc42c09-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.664452 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-config-data\") pod \"nova-scheduler-0\" (UID: \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.664483 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqp2v\" (UniqueName: \"kubernetes.io/projected/08c60551-7470-4a5e-93aa-e2513d0e4614-kube-api-access-gqp2v\") pod \"nova-metadata-0\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.664504 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szp97\" (UniqueName: \"kubernetes.io/projected/b46021eb-b966-48c2-a988-c050546ce0d6-kube-api-access-szp97\") pod \"nova-cell1-novncproxy-0\" (UID: \"b46021eb-b966-48c2-a988-c050546ce0d6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.664555 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b46021eb-b966-48c2-a988-c050546ce0d6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b46021eb-b966-48c2-a988-c050546ce0d6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.664577 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08c60551-7470-4a5e-93aa-e2513d0e4614-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.668970 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/659840ff-0364-4f9c-881d-ab33bbc42c09-logs\") pod \"nova-api-0\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.673419 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b46021eb-b966-48c2-a988-c050546ce0d6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"b46021eb-b966-48c2-a988-c050546ce0d6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.675093 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/659840ff-0364-4f9c-881d-ab33bbc42c09-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.676978 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b46021eb-b966-48c2-a988-c050546ce0d6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"b46021eb-b966-48c2-a988-c050546ce0d6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:26:29 crc kubenswrapper[4774]: 
I1121 14:26:29.699330 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/659840ff-0364-4f9c-881d-ab33bbc42c09-config-data\") pod \"nova-api-0\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.704241 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f57lj\" (UniqueName: \"kubernetes.io/projected/659840ff-0364-4f9c-881d-ab33bbc42c09-kube-api-access-f57lj\") pod \"nova-api-0\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " pod="openstack/nova-api-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.711223 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b9c9d97f9-8wtgk"] Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.714124 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.724666 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b9c9d97f9-8wtgk"] Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.731692 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szp97\" (UniqueName: \"kubernetes.io/projected/b46021eb-b966-48c2-a988-c050546ce0d6-kube-api-access-szp97\") pod \"nova-cell1-novncproxy-0\" (UID: \"b46021eb-b966-48c2-a988-c050546ce0d6\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.741711 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.766329 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08c60551-7470-4a5e-93aa-e2513d0e4614-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.766395 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.766447 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08c60551-7470-4a5e-93aa-e2513d0e4614-logs\") pod \"nova-metadata-0\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.766490 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvhdd\" (UniqueName: \"kubernetes.io/projected/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-kube-api-access-fvhdd\") pod \"nova-scheduler-0\" (UID: \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.766539 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08c60551-7470-4a5e-93aa-e2513d0e4614-config-data\") pod \"nova-metadata-0\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " pod="openstack/nova-metadata-0" Nov 21 
14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.766598 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-config-data\") pod \"nova-scheduler-0\" (UID: \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.766633 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqp2v\" (UniqueName: \"kubernetes.io/projected/08c60551-7470-4a5e-93aa-e2513d0e4614-kube-api-access-gqp2v\") pod \"nova-metadata-0\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.772776 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08c60551-7470-4a5e-93aa-e2513d0e4614-logs\") pod \"nova-metadata-0\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.776798 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08c60551-7470-4a5e-93aa-e2513d0e4614-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.783590 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08c60551-7470-4a5e-93aa-e2513d0e4614-config-data\") pod \"nova-metadata-0\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.791727 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-config-data\") pod \"nova-scheduler-0\" (UID: \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.791938 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.800887 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvhdd\" (UniqueName: \"kubernetes.io/projected/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-kube-api-access-fvhdd\") pod \"nova-scheduler-0\" (UID: \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.801529 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqp2v\" (UniqueName: \"kubernetes.io/projected/08c60551-7470-4a5e-93aa-e2513d0e4614-kube-api-access-gqp2v\") pod \"nova-metadata-0\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.868830 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-config\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: 
\"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.868901 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8rwh\" (UniqueName: \"kubernetes.io/projected/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-kube-api-access-k8rwh\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.868928 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-ovsdbserver-nb\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.869130 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.869452 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-dns-svc\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.869502 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-dns-swift-storage-0\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.869526 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-ovsdbserver-sb\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.960887 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.975915 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-dns-svc\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.975981 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-dns-swift-storage-0\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.976006 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-ovsdbserver-sb\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.976059 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-config\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.976091 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8rwh\" (UniqueName: \"kubernetes.io/projected/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-kube-api-access-k8rwh\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.976115 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-ovsdbserver-nb\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.976995 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-ovsdbserver-nb\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.977563 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-dns-svc\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.978252 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-dns-swift-storage-0\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc 
kubenswrapper[4774]: I1121 14:26:29.980539 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-ovsdbserver-sb\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:29 crc kubenswrapper[4774]: I1121 14:26:29.981341 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-config\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.000524 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.009021 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8rwh\" (UniqueName: \"kubernetes.io/projected/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-kube-api-access-k8rwh\") pod \"dnsmasq-dns-6b9c9d97f9-8wtgk\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.065785 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.151432 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-ggjhq"] Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.242438 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="50f0abd54c499ac14c722ce78a1be249e3c65fdd8bde5f56a8b5c580514c52ff" exitCode=0 Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.242507 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"50f0abd54c499ac14c722ce78a1be249e3c65fdd8bde5f56a8b5c580514c52ff"} Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.242538 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d"} Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.242566 4774 scope.go:117] "RemoveContainer" containerID="3354716a800c28fc56d313636d4868697077dddaabfb1fc36da33f6ee413381b" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.344808 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4zrq5"] Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.346793 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.354699 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.355682 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.373922 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4zrq5"] Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.413268 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.475370 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.507126 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-scripts\") pod \"nova-cell1-conductor-db-sync-4zrq5\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.507528 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-config-data\") pod \"nova-cell1-conductor-db-sync-4zrq5\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.507634 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-4zrq5\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.507690 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm4zz\" (UniqueName: \"kubernetes.io/projected/3f21c9fd-3364-4328-8717-c25f82fe8d02-kube-api-access-pm4zz\") pod \"nova-cell1-conductor-db-sync-4zrq5\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.614450 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-config-data\") pod \"nova-cell1-conductor-db-sync-4zrq5\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.614545 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-4zrq5\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.614571 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm4zz\" 
(UniqueName: \"kubernetes.io/projected/3f21c9fd-3364-4328-8717-c25f82fe8d02-kube-api-access-pm4zz\") pod \"nova-cell1-conductor-db-sync-4zrq5\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.614931 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-scripts\") pod \"nova-cell1-conductor-db-sync-4zrq5\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.621883 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-scripts\") pod \"nova-cell1-conductor-db-sync-4zrq5\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.621986 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-4zrq5\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.622194 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-config-data\") pod \"nova-cell1-conductor-db-sync-4zrq5\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.635474 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm4zz\" (UniqueName: \"kubernetes.io/projected/3f21c9fd-3364-4328-8717-c25f82fe8d02-kube-api-access-pm4zz\") pod \"nova-cell1-conductor-db-sync-4zrq5\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.688429 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.708288 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.726292 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:30 crc kubenswrapper[4774]: W1121 14:26:30.898338 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2bdd63f_eb21_4fdc_9aca_9ff3148dccad.slice/crio-f40c808ec77877b24e14083387396fc219cbe0388be932a053234a78f9a470a1 WatchSource:0}: Error finding container f40c808ec77877b24e14083387396fc219cbe0388be932a053234a78f9a470a1: Status 404 returned error can't find the container with id f40c808ec77877b24e14083387396fc219cbe0388be932a053234a78f9a470a1 Nov 21 14:26:30 crc kubenswrapper[4774]: I1121 14:26:30.898363 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b9c9d97f9-8wtgk"] Nov 21 14:26:31 crc kubenswrapper[4774]: I1121 14:26:31.265536 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ggjhq" event={"ID":"d99ed036-dfda-4b15-88b9-cdfed30626b5","Type":"ContainerStarted","Data":"e14fad553b5ebf16a48988366becd3b511703877d857f82b3dff805dbc486ccb"} Nov 21 14:26:31 crc kubenswrapper[4774]: I1121 14:26:31.266033 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ggjhq" event={"ID":"d99ed036-dfda-4b15-88b9-cdfed30626b5","Type":"ContainerStarted","Data":"d7c3f9a6d036c4400af26dcdaebe617d887af6cbc233104a4743db87a3f89728"} Nov 21 14:26:31 crc kubenswrapper[4774]: I1121 14:26:31.272052 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"08c60551-7470-4a5e-93aa-e2513d0e4614","Type":"ContainerStarted","Data":"b2c0dc9ac28a432f412be34d3bc73c4e9d7e44757cc76515d25d1eeceb112a25"} Nov 21 14:26:31 crc kubenswrapper[4774]: I1121 14:26:31.276366 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"659840ff-0364-4f9c-881d-ab33bbc42c09","Type":"ContainerStarted","Data":"eac174a471310e8649f9463816bafeafac1ef2606a4e9fc1af710b7bfd501e02"} Nov 21 14:26:31 crc kubenswrapper[4774]: I1121 14:26:31.277724 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" event={"ID":"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad","Type":"ContainerStarted","Data":"f40c808ec77877b24e14083387396fc219cbe0388be932a053234a78f9a470a1"} Nov 21 14:26:31 crc kubenswrapper[4774]: I1121 14:26:31.289164 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"95c540a0-ab8c-4f25-9f17-cc73c4a89cca","Type":"ContainerStarted","Data":"990e3082d4dfb6c81991ae974e44a6223a7972f301e317a8a2c92f775cf959f9"} Nov 21 14:26:31 crc kubenswrapper[4774]: I1121 14:26:31.294546 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b46021eb-b966-48c2-a988-c050546ce0d6","Type":"ContainerStarted","Data":"b8991e8a11d96c002effbb4c5f7ac294701d818839e237ac0e31e5c34e2a6968"} Nov 21 14:26:31 crc kubenswrapper[4774]: I1121 14:26:31.309303 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-ggjhq" podStartSLOduration=2.309279712 podStartE2EDuration="2.309279712s" podCreationTimestamp="2025-11-21 14:26:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:26:31.294087303 +0000 UTC m=+1381.946286562" watchObservedRunningTime="2025-11-21 14:26:31.309279712 +0000 UTC m=+1381.961478971" Nov 21 14:26:31 crc kubenswrapper[4774]: 
I1121 14:26:31.322051 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4zrq5"] Nov 21 14:26:32 crc kubenswrapper[4774]: I1121 14:26:32.337786 4774 generic.go:334] "Generic (PLEG): container finished" podID="f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" containerID="fb6b13b2596604351c6af17a663ada81ceb746ed17aadfa94e325d39581c37b2" exitCode=0 Nov 21 14:26:32 crc kubenswrapper[4774]: I1121 14:26:32.338382 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" event={"ID":"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad","Type":"ContainerDied","Data":"fb6b13b2596604351c6af17a663ada81ceb746ed17aadfa94e325d39581c37b2"} Nov 21 14:26:32 crc kubenswrapper[4774]: I1121 14:26:32.341318 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-4zrq5" event={"ID":"3f21c9fd-3364-4328-8717-c25f82fe8d02","Type":"ContainerStarted","Data":"a43a8569084f224a0dc3ea5dc22f601537d3a46a2926db122b669c10e28a2c1a"} Nov 21 14:26:32 crc kubenswrapper[4774]: I1121 14:26:32.341367 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-4zrq5" event={"ID":"3f21c9fd-3364-4328-8717-c25f82fe8d02","Type":"ContainerStarted","Data":"6d41ac3f447e816971a3ad8b1ccc4020fa27433e62c1b01c4e5b8ba327c4ee57"} Nov 21 14:26:32 crc kubenswrapper[4774]: I1121 14:26:32.394166 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-4zrq5" podStartSLOduration=2.39412079 podStartE2EDuration="2.39412079s" podCreationTimestamp="2025-11-21 14:26:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:26:32.38271351 +0000 UTC m=+1383.034912779" watchObservedRunningTime="2025-11-21 14:26:32.39412079 +0000 UTC m=+1383.046320059" Nov 21 14:26:34 crc kubenswrapper[4774]: I1121 14:26:34.062998 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:34 crc kubenswrapper[4774]: I1121 14:26:34.090606 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.395502 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" event={"ID":"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad","Type":"ContainerStarted","Data":"78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9"} Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.396165 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.400300 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"95c540a0-ab8c-4f25-9f17-cc73c4a89cca","Type":"ContainerStarted","Data":"bdfda6c612f373c01bcb72e2d01db576c8f2e098d2b01dfac3f8bdf41267689e"} Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.402283 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b46021eb-b966-48c2-a988-c050546ce0d6","Type":"ContainerStarted","Data":"492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704"} Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.402323 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="b46021eb-b966-48c2-a988-c050546ce0d6" 
containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704" gracePeriod=30 Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.404846 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"08c60551-7470-4a5e-93aa-e2513d0e4614","Type":"ContainerStarted","Data":"13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47"} Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.404889 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"08c60551-7470-4a5e-93aa-e2513d0e4614","Type":"ContainerStarted","Data":"eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b"} Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.405016 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="08c60551-7470-4a5e-93aa-e2513d0e4614" containerName="nova-metadata-log" containerID="cri-o://eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b" gracePeriod=30 Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.405108 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="08c60551-7470-4a5e-93aa-e2513d0e4614" containerName="nova-metadata-metadata" containerID="cri-o://13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47" gracePeriod=30 Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.416455 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"659840ff-0364-4f9c-881d-ab33bbc42c09","Type":"ContainerStarted","Data":"4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451"} Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.416639 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"659840ff-0364-4f9c-881d-ab33bbc42c09","Type":"ContainerStarted","Data":"c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee"} Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.443626 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" podStartSLOduration=6.443601163 podStartE2EDuration="6.443601163s" podCreationTimestamp="2025-11-21 14:26:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:26:35.418157367 +0000 UTC m=+1386.070356636" watchObservedRunningTime="2025-11-21 14:26:35.443601163 +0000 UTC m=+1386.095800422" Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.444953 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.155388387 podStartE2EDuration="6.444938942s" podCreationTimestamp="2025-11-21 14:26:29 +0000 UTC" firstStartedPulling="2025-11-21 14:26:30.681584613 +0000 UTC m=+1381.333783872" lastFinishedPulling="2025-11-21 14:26:33.971135168 +0000 UTC m=+1384.623334427" observedRunningTime="2025-11-21 14:26:35.441050259 +0000 UTC m=+1386.093249518" watchObservedRunningTime="2025-11-21 14:26:35.444938942 +0000 UTC m=+1386.097138201" Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.465993 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.000996703 podStartE2EDuration="6.4659626s" podCreationTimestamp="2025-11-21 14:26:29 +0000 UTC" firstStartedPulling="2025-11-21 
14:26:30.509475437 +0000 UTC m=+1381.161674696" lastFinishedPulling="2025-11-21 14:26:33.974441334 +0000 UTC m=+1384.626640593" observedRunningTime="2025-11-21 14:26:35.456732423 +0000 UTC m=+1386.108931702" watchObservedRunningTime="2025-11-21 14:26:35.4659626 +0000 UTC m=+1386.118161869" Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.477772 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.871375864 podStartE2EDuration="6.47774578s" podCreationTimestamp="2025-11-21 14:26:29 +0000 UTC" firstStartedPulling="2025-11-21 14:26:30.419043522 +0000 UTC m=+1381.071242781" lastFinishedPulling="2025-11-21 14:26:34.025413428 +0000 UTC m=+1384.677612697" observedRunningTime="2025-11-21 14:26:35.469680707 +0000 UTC m=+1386.121879966" watchObservedRunningTime="2025-11-21 14:26:35.47774578 +0000 UTC m=+1386.129945049" Nov 21 14:26:35 crc kubenswrapper[4774]: I1121 14:26:35.497942 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.104879505 podStartE2EDuration="6.497911733s" podCreationTimestamp="2025-11-21 14:26:29 +0000 UTC" firstStartedPulling="2025-11-21 14:26:30.69428352 +0000 UTC m=+1381.346482779" lastFinishedPulling="2025-11-21 14:26:34.087315748 +0000 UTC m=+1384.739515007" observedRunningTime="2025-11-21 14:26:35.487948075 +0000 UTC m=+1386.140147344" watchObservedRunningTime="2025-11-21 14:26:35.497911733 +0000 UTC m=+1386.150110992" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.017335 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.058116 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08c60551-7470-4a5e-93aa-e2513d0e4614-config-data\") pod \"08c60551-7470-4a5e-93aa-e2513d0e4614\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.058258 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08c60551-7470-4a5e-93aa-e2513d0e4614-combined-ca-bundle\") pod \"08c60551-7470-4a5e-93aa-e2513d0e4614\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.058319 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08c60551-7470-4a5e-93aa-e2513d0e4614-logs\") pod \"08c60551-7470-4a5e-93aa-e2513d0e4614\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.058367 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqp2v\" (UniqueName: \"kubernetes.io/projected/08c60551-7470-4a5e-93aa-e2513d0e4614-kube-api-access-gqp2v\") pod \"08c60551-7470-4a5e-93aa-e2513d0e4614\" (UID: \"08c60551-7470-4a5e-93aa-e2513d0e4614\") " Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.062063 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08c60551-7470-4a5e-93aa-e2513d0e4614-logs" (OuterVolumeSpecName: "logs") pod "08c60551-7470-4a5e-93aa-e2513d0e4614" (UID: "08c60551-7470-4a5e-93aa-e2513d0e4614"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.076419 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08c60551-7470-4a5e-93aa-e2513d0e4614-kube-api-access-gqp2v" (OuterVolumeSpecName: "kube-api-access-gqp2v") pod "08c60551-7470-4a5e-93aa-e2513d0e4614" (UID: "08c60551-7470-4a5e-93aa-e2513d0e4614"). InnerVolumeSpecName "kube-api-access-gqp2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.090944 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08c60551-7470-4a5e-93aa-e2513d0e4614-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08c60551-7470-4a5e-93aa-e2513d0e4614" (UID: "08c60551-7470-4a5e-93aa-e2513d0e4614"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.111436 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08c60551-7470-4a5e-93aa-e2513d0e4614-config-data" (OuterVolumeSpecName: "config-data") pod "08c60551-7470-4a5e-93aa-e2513d0e4614" (UID: "08c60551-7470-4a5e-93aa-e2513d0e4614"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.161309 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08c60551-7470-4a5e-93aa-e2513d0e4614-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.161573 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08c60551-7470-4a5e-93aa-e2513d0e4614-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.161588 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08c60551-7470-4a5e-93aa-e2513d0e4614-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.161596 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqp2v\" (UniqueName: \"kubernetes.io/projected/08c60551-7470-4a5e-93aa-e2513d0e4614-kube-api-access-gqp2v\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.441836 4774 generic.go:334] "Generic (PLEG): container finished" podID="08c60551-7470-4a5e-93aa-e2513d0e4614" containerID="13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47" exitCode=0 Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.441885 4774 generic.go:334] "Generic (PLEG): container finished" podID="08c60551-7470-4a5e-93aa-e2513d0e4614" containerID="eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b" exitCode=143 Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.441960 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.441972 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"08c60551-7470-4a5e-93aa-e2513d0e4614","Type":"ContainerDied","Data":"13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47"} Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.442038 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"08c60551-7470-4a5e-93aa-e2513d0e4614","Type":"ContainerDied","Data":"eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b"} Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.442053 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"08c60551-7470-4a5e-93aa-e2513d0e4614","Type":"ContainerDied","Data":"b2c0dc9ac28a432f412be34d3bc73c4e9d7e44757cc76515d25d1eeceb112a25"} Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.442072 4774 scope.go:117] "RemoveContainer" containerID="13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.498828 4774 scope.go:117] "RemoveContainer" containerID="eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.502990 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.524160 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.547892 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:36 crc kubenswrapper[4774]: E1121 14:26:36.548452 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08c60551-7470-4a5e-93aa-e2513d0e4614" containerName="nova-metadata-metadata" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.548475 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="08c60551-7470-4a5e-93aa-e2513d0e4614" containerName="nova-metadata-metadata" Nov 21 14:26:36 crc kubenswrapper[4774]: E1121 14:26:36.548517 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08c60551-7470-4a5e-93aa-e2513d0e4614" containerName="nova-metadata-log" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.548524 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="08c60551-7470-4a5e-93aa-e2513d0e4614" containerName="nova-metadata-log" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.548724 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="08c60551-7470-4a5e-93aa-e2513d0e4614" containerName="nova-metadata-log" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.548741 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="08c60551-7470-4a5e-93aa-e2513d0e4614" containerName="nova-metadata-metadata" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.549903 4774 scope.go:117] "RemoveContainer" containerID="13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.549982 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: E1121 14:26:36.550613 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47\": container with ID starting with 13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47 not found: ID does not exist" containerID="13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.550658 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47"} err="failed to get container status \"13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47\": rpc error: code = NotFound desc = could not find container \"13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47\": container with ID starting with 13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47 not found: ID does not exist" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.550690 4774 scope.go:117] "RemoveContainer" containerID="eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b" Nov 21 14:26:36 crc kubenswrapper[4774]: E1121 14:26:36.554001 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b\": container with ID starting with eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b not found: ID does not exist" containerID="eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.554053 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b"} err="failed to get container status \"eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b\": rpc error: code = NotFound desc = could not find container \"eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b\": container with ID starting with eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b not found: ID does not exist" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.554087 4774 scope.go:117] "RemoveContainer" containerID="13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.554396 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.554542 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47"} err="failed to get container status \"13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47\": rpc error: code = NotFound desc = could not find container \"13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47\": container with ID starting with 13fd93f895f87eff54342bafa4005ce6123bf9213ba1ded45bb9361e845a4d47 not found: ID does not exist" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.554572 4774 scope.go:117] "RemoveContainer" containerID="eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.554644 4774 
reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.554877 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b"} err="failed to get container status \"eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b\": rpc error: code = NotFound desc = could not find container \"eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b\": container with ID starting with eeda2dfb68a84dca41fd405671b9dc3edd291857033ef41d2628890fbbffa80b not found: ID does not exist" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.558325 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.571557 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-config-data\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.572556 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfrsj\" (UniqueName: \"kubernetes.io/projected/8823197c-d672-4c7b-85b6-eca799fdd37b-kube-api-access-gfrsj\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.572782 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8823197c-d672-4c7b-85b6-eca799fdd37b-logs\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.573129 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.573271 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.675258 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfrsj\" (UniqueName: \"kubernetes.io/projected/8823197c-d672-4c7b-85b6-eca799fdd37b-kube-api-access-gfrsj\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.675734 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8823197c-d672-4c7b-85b6-eca799fdd37b-logs\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.675914 4774 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.676055 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.676231 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8823197c-d672-4c7b-85b6-eca799fdd37b-logs\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.676385 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-config-data\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.680141 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.680401 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.682291 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-config-data\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.694074 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfrsj\" (UniqueName: \"kubernetes.io/projected/8823197c-d672-4c7b-85b6-eca799fdd37b-kube-api-access-gfrsj\") pod \"nova-metadata-0\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " pod="openstack/nova-metadata-0" Nov 21 14:26:36 crc kubenswrapper[4774]: I1121 14:26:36.870412 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:26:37 crc kubenswrapper[4774]: I1121 14:26:37.319533 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:37 crc kubenswrapper[4774]: W1121 14:26:37.332025 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8823197c_d672_4c7b_85b6_eca799fdd37b.slice/crio-5679b0b8dc03e93cc396ee20dc4eb16e2a3147ff85e1e6bbc9f558425e164939 WatchSource:0}: Error finding container 5679b0b8dc03e93cc396ee20dc4eb16e2a3147ff85e1e6bbc9f558425e164939: Status 404 returned error can't find the container with id 5679b0b8dc03e93cc396ee20dc4eb16e2a3147ff85e1e6bbc9f558425e164939 Nov 21 14:26:37 crc kubenswrapper[4774]: I1121 14:26:37.455255 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8823197c-d672-4c7b-85b6-eca799fdd37b","Type":"ContainerStarted","Data":"5679b0b8dc03e93cc396ee20dc4eb16e2a3147ff85e1e6bbc9f558425e164939"} Nov 21 14:26:38 crc kubenswrapper[4774]: I1121 14:26:38.116312 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08c60551-7470-4a5e-93aa-e2513d0e4614" path="/var/lib/kubelet/pods/08c60551-7470-4a5e-93aa-e2513d0e4614/volumes" Nov 21 14:26:38 crc kubenswrapper[4774]: I1121 14:26:38.475108 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8823197c-d672-4c7b-85b6-eca799fdd37b","Type":"ContainerStarted","Data":"ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d"} Nov 21 14:26:38 crc kubenswrapper[4774]: I1121 14:26:38.475162 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8823197c-d672-4c7b-85b6-eca799fdd37b","Type":"ContainerStarted","Data":"e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3"} Nov 21 14:26:38 crc kubenswrapper[4774]: I1121 14:26:38.497746 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.497708392 podStartE2EDuration="2.497708392s" podCreationTimestamp="2025-11-21 14:26:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:26:38.492195322 +0000 UTC m=+1389.144394591" watchObservedRunningTime="2025-11-21 14:26:38.497708392 +0000 UTC m=+1389.149907691" Nov 21 14:26:39 crc kubenswrapper[4774]: I1121 14:26:39.488581 4774 generic.go:334] "Generic (PLEG): container finished" podID="d99ed036-dfda-4b15-88b9-cdfed30626b5" containerID="e14fad553b5ebf16a48988366becd3b511703877d857f82b3dff805dbc486ccb" exitCode=0 Nov 21 14:26:39 crc kubenswrapper[4774]: I1121 14:26:39.488673 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ggjhq" event={"ID":"d99ed036-dfda-4b15-88b9-cdfed30626b5","Type":"ContainerDied","Data":"e14fad553b5ebf16a48988366becd3b511703877d857f82b3dff805dbc486ccb"} Nov 21 14:26:39 crc kubenswrapper[4774]: I1121 14:26:39.742988 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:26:39 crc kubenswrapper[4774]: I1121 14:26:39.869580 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 21 14:26:39 crc kubenswrapper[4774]: I1121 14:26:39.869681 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 21 
14:26:39 crc kubenswrapper[4774]: I1121 14:26:39.902750 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.001625 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.001688 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.068109 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.160393 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7445585cd9-5rcvg"] Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.160732 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" podUID="89dc3e22-885f-44a0-af00-ecbce936e8f0" containerName="dnsmasq-dns" containerID="cri-o://5311aea5b99af3838b2c7927c5d0fe8e8d4e1aa5679eb1dee621e86bd7201b81" gracePeriod=10 Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.502422 4774 generic.go:334] "Generic (PLEG): container finished" podID="89dc3e22-885f-44a0-af00-ecbce936e8f0" containerID="5311aea5b99af3838b2c7927c5d0fe8e8d4e1aa5679eb1dee621e86bd7201b81" exitCode=0 Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.502489 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" event={"ID":"89dc3e22-885f-44a0-af00-ecbce936e8f0","Type":"ContainerDied","Data":"5311aea5b99af3838b2c7927c5d0fe8e8d4e1aa5679eb1dee621e86bd7201b81"} Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.565026 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.831349 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.976305 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dsl6\" (UniqueName: \"kubernetes.io/projected/89dc3e22-885f-44a0-af00-ecbce936e8f0-kube-api-access-9dsl6\") pod \"89dc3e22-885f-44a0-af00-ecbce936e8f0\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.976422 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-dns-svc\") pod \"89dc3e22-885f-44a0-af00-ecbce936e8f0\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.976550 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-dns-swift-storage-0\") pod \"89dc3e22-885f-44a0-af00-ecbce936e8f0\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.976656 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-config\") pod \"89dc3e22-885f-44a0-af00-ecbce936e8f0\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.976715 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-ovsdbserver-sb\") pod \"89dc3e22-885f-44a0-af00-ecbce936e8f0\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.976739 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-ovsdbserver-nb\") pod \"89dc3e22-885f-44a0-af00-ecbce936e8f0\" (UID: \"89dc3e22-885f-44a0-af00-ecbce936e8f0\") " Nov 21 14:26:40 crc kubenswrapper[4774]: I1121 14:26:40.984237 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89dc3e22-885f-44a0-af00-ecbce936e8f0-kube-api-access-9dsl6" (OuterVolumeSpecName: "kube-api-access-9dsl6") pod "89dc3e22-885f-44a0-af00-ecbce936e8f0" (UID: "89dc3e22-885f-44a0-af00-ecbce936e8f0"). InnerVolumeSpecName "kube-api-access-9dsl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.051095 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "89dc3e22-885f-44a0-af00-ecbce936e8f0" (UID: "89dc3e22-885f-44a0-af00-ecbce936e8f0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.056001 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "89dc3e22-885f-44a0-af00-ecbce936e8f0" (UID: "89dc3e22-885f-44a0-af00-ecbce936e8f0"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.076311 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "89dc3e22-885f-44a0-af00-ecbce936e8f0" (UID: "89dc3e22-885f-44a0-af00-ecbce936e8f0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.080307 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.080332 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.080344 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dsl6\" (UniqueName: \"kubernetes.io/projected/89dc3e22-885f-44a0-af00-ecbce936e8f0-kube-api-access-9dsl6\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.080357 4774 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.084213 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "89dc3e22-885f-44a0-af00-ecbce936e8f0" (UID: "89dc3e22-885f-44a0-af00-ecbce936e8f0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.094199 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="659840ff-0364-4f9c-881d-ab33bbc42c09" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.183:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.094463 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="659840ff-0364-4f9c-881d-ab33bbc42c09" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.183:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.097211 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-config" (OuterVolumeSpecName: "config") pod "89dc3e22-885f-44a0-af00-ecbce936e8f0" (UID: "89dc3e22-885f-44a0-af00-ecbce936e8f0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.127456 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.182299 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.182348 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/89dc3e22-885f-44a0-af00-ecbce936e8f0-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.283705 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvnx9\" (UniqueName: \"kubernetes.io/projected/d99ed036-dfda-4b15-88b9-cdfed30626b5-kube-api-access-cvnx9\") pod \"d99ed036-dfda-4b15-88b9-cdfed30626b5\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.283775 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-combined-ca-bundle\") pod \"d99ed036-dfda-4b15-88b9-cdfed30626b5\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.283925 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-scripts\") pod \"d99ed036-dfda-4b15-88b9-cdfed30626b5\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.283979 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-config-data\") pod \"d99ed036-dfda-4b15-88b9-cdfed30626b5\" (UID: \"d99ed036-dfda-4b15-88b9-cdfed30626b5\") " Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.287791 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-scripts" (OuterVolumeSpecName: "scripts") pod "d99ed036-dfda-4b15-88b9-cdfed30626b5" (UID: "d99ed036-dfda-4b15-88b9-cdfed30626b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.290094 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d99ed036-dfda-4b15-88b9-cdfed30626b5-kube-api-access-cvnx9" (OuterVolumeSpecName: "kube-api-access-cvnx9") pod "d99ed036-dfda-4b15-88b9-cdfed30626b5" (UID: "d99ed036-dfda-4b15-88b9-cdfed30626b5"). InnerVolumeSpecName "kube-api-access-cvnx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.317131 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d99ed036-dfda-4b15-88b9-cdfed30626b5" (UID: "d99ed036-dfda-4b15-88b9-cdfed30626b5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.326386 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-config-data" (OuterVolumeSpecName: "config-data") pod "d99ed036-dfda-4b15-88b9-cdfed30626b5" (UID: "d99ed036-dfda-4b15-88b9-cdfed30626b5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.387097 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvnx9\" (UniqueName: \"kubernetes.io/projected/d99ed036-dfda-4b15-88b9-cdfed30626b5-kube-api-access-cvnx9\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.387141 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.387161 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.387178 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d99ed036-dfda-4b15-88b9-cdfed30626b5-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.513020 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" event={"ID":"89dc3e22-885f-44a0-af00-ecbce936e8f0","Type":"ContainerDied","Data":"1f456397eb426bbd92e0271e5005892d4c140c205a2f6224911f54debb55887b"} Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.513095 4774 scope.go:117] "RemoveContainer" containerID="5311aea5b99af3838b2c7927c5d0fe8e8d4e1aa5679eb1dee621e86bd7201b81" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.513271 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7445585cd9-5rcvg" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.522359 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ggjhq" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.530213 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ggjhq" event={"ID":"d99ed036-dfda-4b15-88b9-cdfed30626b5","Type":"ContainerDied","Data":"d7c3f9a6d036c4400af26dcdaebe617d887af6cbc233104a4743db87a3f89728"} Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.530605 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7c3f9a6d036c4400af26dcdaebe617d887af6cbc233104a4743db87a3f89728" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.542476 4774 scope.go:117] "RemoveContainer" containerID="03073443effb0b0a18d4fc85e2231f3d0ab56596686b0d287b76bcc883717466" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.568987 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7445585cd9-5rcvg"] Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.577720 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7445585cd9-5rcvg"] Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.696555 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.697119 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="659840ff-0364-4f9c-881d-ab33bbc42c09" containerName="nova-api-log" containerID="cri-o://c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee" gracePeriod=30 Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.697490 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="659840ff-0364-4f9c-881d-ab33bbc42c09" containerName="nova-api-api" containerID="cri-o://4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451" gracePeriod=30 Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.773943 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.782286 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.782535 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8823197c-d672-4c7b-85b6-eca799fdd37b" containerName="nova-metadata-log" containerID="cri-o://e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3" gracePeriod=30 Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.783241 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="8823197c-d672-4c7b-85b6-eca799fdd37b" containerName="nova-metadata-metadata" containerID="cri-o://ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d" gracePeriod=30 Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.870936 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 14:26:41 crc kubenswrapper[4774]: I1121 14:26:41.870989 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.110444 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89dc3e22-885f-44a0-af00-ecbce936e8f0" path="/var/lib/kubelet/pods/89dc3e22-885f-44a0-af00-ecbce936e8f0/volumes" Nov 21 14:26:42 crc 
kubenswrapper[4774]: I1121 14:26:42.400718 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.513495 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-nova-metadata-tls-certs\") pod \"8823197c-d672-4c7b-85b6-eca799fdd37b\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.513594 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8823197c-d672-4c7b-85b6-eca799fdd37b-logs\") pod \"8823197c-d672-4c7b-85b6-eca799fdd37b\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.513738 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-combined-ca-bundle\") pod \"8823197c-d672-4c7b-85b6-eca799fdd37b\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.513804 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-config-data\") pod \"8823197c-d672-4c7b-85b6-eca799fdd37b\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.513859 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gfrsj\" (UniqueName: \"kubernetes.io/projected/8823197c-d672-4c7b-85b6-eca799fdd37b-kube-api-access-gfrsj\") pod \"8823197c-d672-4c7b-85b6-eca799fdd37b\" (UID: \"8823197c-d672-4c7b-85b6-eca799fdd37b\") " Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.516404 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8823197c-d672-4c7b-85b6-eca799fdd37b-logs" (OuterVolumeSpecName: "logs") pod "8823197c-d672-4c7b-85b6-eca799fdd37b" (UID: "8823197c-d672-4c7b-85b6-eca799fdd37b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.522790 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8823197c-d672-4c7b-85b6-eca799fdd37b-kube-api-access-gfrsj" (OuterVolumeSpecName: "kube-api-access-gfrsj") pod "8823197c-d672-4c7b-85b6-eca799fdd37b" (UID: "8823197c-d672-4c7b-85b6-eca799fdd37b"). InnerVolumeSpecName "kube-api-access-gfrsj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.550911 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-config-data" (OuterVolumeSpecName: "config-data") pod "8823197c-d672-4c7b-85b6-eca799fdd37b" (UID: "8823197c-d672-4c7b-85b6-eca799fdd37b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.568012 4774 generic.go:334] "Generic (PLEG): container finished" podID="8823197c-d672-4c7b-85b6-eca799fdd37b" containerID="ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d" exitCode=0 Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.568129 4774 generic.go:334] "Generic (PLEG): container finished" podID="8823197c-d672-4c7b-85b6-eca799fdd37b" containerID="e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3" exitCode=143 Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.568189 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8823197c-d672-4c7b-85b6-eca799fdd37b","Type":"ContainerDied","Data":"ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d"} Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.568230 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8823197c-d672-4c7b-85b6-eca799fdd37b","Type":"ContainerDied","Data":"e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3"} Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.568242 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8823197c-d672-4c7b-85b6-eca799fdd37b","Type":"ContainerDied","Data":"5679b0b8dc03e93cc396ee20dc4eb16e2a3147ff85e1e6bbc9f558425e164939"} Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.568261 4774 scope.go:117] "RemoveContainer" containerID="ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.568413 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.580088 4774 generic.go:334] "Generic (PLEG): container finished" podID="3f21c9fd-3364-4328-8717-c25f82fe8d02" containerID="a43a8569084f224a0dc3ea5dc22f601537d3a46a2926db122b669c10e28a2c1a" exitCode=0 Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.580210 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-4zrq5" event={"ID":"3f21c9fd-3364-4328-8717-c25f82fe8d02","Type":"ContainerDied","Data":"a43a8569084f224a0dc3ea5dc22f601537d3a46a2926db122b669c10e28a2c1a"} Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.581811 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8823197c-d672-4c7b-85b6-eca799fdd37b" (UID: "8823197c-d672-4c7b-85b6-eca799fdd37b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.591869 4774 generic.go:334] "Generic (PLEG): container finished" podID="659840ff-0364-4f9c-881d-ab33bbc42c09" containerID="c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee" exitCode=143 Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.591958 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"659840ff-0364-4f9c-881d-ab33bbc42c09","Type":"ContainerDied","Data":"c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee"} Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.592243 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="95c540a0-ab8c-4f25-9f17-cc73c4a89cca" containerName="nova-scheduler-scheduler" containerID="cri-o://bdfda6c612f373c01bcb72e2d01db576c8f2e098d2b01dfac3f8bdf41267689e" gracePeriod=30 Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.610933 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "8823197c-d672-4c7b-85b6-eca799fdd37b" (UID: "8823197c-d672-4c7b-85b6-eca799fdd37b"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.616017 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gfrsj\" (UniqueName: \"kubernetes.io/projected/8823197c-d672-4c7b-85b6-eca799fdd37b-kube-api-access-gfrsj\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.616053 4774 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.616066 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8823197c-d672-4c7b-85b6-eca799fdd37b-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.616079 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.616089 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8823197c-d672-4c7b-85b6-eca799fdd37b-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.705905 4774 scope.go:117] "RemoveContainer" containerID="e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.733999 4774 scope.go:117] "RemoveContainer" containerID="ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d" Nov 21 14:26:42 crc kubenswrapper[4774]: E1121 14:26:42.734757 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d\": container with ID starting with ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d not found: ID does not exist" 
containerID="ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.734805 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d"} err="failed to get container status \"ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d\": rpc error: code = NotFound desc = could not find container \"ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d\": container with ID starting with ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d not found: ID does not exist" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.734853 4774 scope.go:117] "RemoveContainer" containerID="e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3" Nov 21 14:26:42 crc kubenswrapper[4774]: E1121 14:26:42.735210 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3\": container with ID starting with e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3 not found: ID does not exist" containerID="e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.735230 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3"} err="failed to get container status \"e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3\": rpc error: code = NotFound desc = could not find container \"e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3\": container with ID starting with e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3 not found: ID does not exist" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.735241 4774 scope.go:117] "RemoveContainer" containerID="ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.735508 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d"} err="failed to get container status \"ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d\": rpc error: code = NotFound desc = could not find container \"ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d\": container with ID starting with ede8c61a81d6d73b1aa334326d0cbb2b66410ec61d6bfc70151b969e5d75785d not found: ID does not exist" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.735527 4774 scope.go:117] "RemoveContainer" containerID="e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.735753 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3"} err="failed to get container status \"e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3\": rpc error: code = NotFound desc = could not find container \"e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3\": container with ID starting with e67c24a5d4eb98a64cb7d143cac4689f553feabf47c7f956150ca4add0b393e3 not found: ID does not exist" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.913142 4774 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.921153 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.940293 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:42 crc kubenswrapper[4774]: E1121 14:26:42.941316 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8823197c-d672-4c7b-85b6-eca799fdd37b" containerName="nova-metadata-metadata" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.942064 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8823197c-d672-4c7b-85b6-eca799fdd37b" containerName="nova-metadata-metadata" Nov 21 14:26:42 crc kubenswrapper[4774]: E1121 14:26:42.942149 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89dc3e22-885f-44a0-af00-ecbce936e8f0" containerName="init" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.942220 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="89dc3e22-885f-44a0-af00-ecbce936e8f0" containerName="init" Nov 21 14:26:42 crc kubenswrapper[4774]: E1121 14:26:42.942317 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89dc3e22-885f-44a0-af00-ecbce936e8f0" containerName="dnsmasq-dns" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.942381 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="89dc3e22-885f-44a0-af00-ecbce936e8f0" containerName="dnsmasq-dns" Nov 21 14:26:42 crc kubenswrapper[4774]: E1121 14:26:42.942456 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d99ed036-dfda-4b15-88b9-cdfed30626b5" containerName="nova-manage" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.942517 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d99ed036-dfda-4b15-88b9-cdfed30626b5" containerName="nova-manage" Nov 21 14:26:42 crc kubenswrapper[4774]: E1121 14:26:42.942580 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8823197c-d672-4c7b-85b6-eca799fdd37b" containerName="nova-metadata-log" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.942633 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8823197c-d672-4c7b-85b6-eca799fdd37b" containerName="nova-metadata-log" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.942910 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="89dc3e22-885f-44a0-af00-ecbce936e8f0" containerName="dnsmasq-dns" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.942985 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8823197c-d672-4c7b-85b6-eca799fdd37b" containerName="nova-metadata-log" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.943071 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8823197c-d672-4c7b-85b6-eca799fdd37b" containerName="nova-metadata-metadata" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.943203 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="d99ed036-dfda-4b15-88b9-cdfed30626b5" containerName="nova-manage" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.944337 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.948689 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.953203 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 21 14:26:42 crc kubenswrapper[4774]: I1121 14:26:42.959727 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.128593 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2pkh\" (UniqueName: \"kubernetes.io/projected/75556fe3-ee8c-4829-ae0e-c79cc249615d-kube-api-access-m2pkh\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.128679 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.128733 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.129064 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75556fe3-ee8c-4829-ae0e-c79cc249615d-logs\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.129242 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-config-data\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.230714 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-config-data\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.230769 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2pkh\" (UniqueName: \"kubernetes.io/projected/75556fe3-ee8c-4829-ae0e-c79cc249615d-kube-api-access-m2pkh\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.230844 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " 
pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.230874 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.231414 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75556fe3-ee8c-4829-ae0e-c79cc249615d-logs\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.232315 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75556fe3-ee8c-4829-ae0e-c79cc249615d-logs\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.236414 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-config-data\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.237307 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.242189 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.254492 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2pkh\" (UniqueName: \"kubernetes.io/projected/75556fe3-ee8c-4829-ae0e-c79cc249615d-kube-api-access-m2pkh\") pod \"nova-metadata-0\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.269406 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.379580 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.783410 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:26:43 crc kubenswrapper[4774]: I1121 14:26:43.962374 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.048128 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-combined-ca-bundle\") pod \"3f21c9fd-3364-4328-8717-c25f82fe8d02\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.048310 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-scripts\") pod \"3f21c9fd-3364-4328-8717-c25f82fe8d02\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.048472 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pm4zz\" (UniqueName: \"kubernetes.io/projected/3f21c9fd-3364-4328-8717-c25f82fe8d02-kube-api-access-pm4zz\") pod \"3f21c9fd-3364-4328-8717-c25f82fe8d02\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.048514 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-config-data\") pod \"3f21c9fd-3364-4328-8717-c25f82fe8d02\" (UID: \"3f21c9fd-3364-4328-8717-c25f82fe8d02\") " Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.054018 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f21c9fd-3364-4328-8717-c25f82fe8d02-kube-api-access-pm4zz" (OuterVolumeSpecName: "kube-api-access-pm4zz") pod "3f21c9fd-3364-4328-8717-c25f82fe8d02" (UID: "3f21c9fd-3364-4328-8717-c25f82fe8d02"). InnerVolumeSpecName "kube-api-access-pm4zz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.054454 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-scripts" (OuterVolumeSpecName: "scripts") pod "3f21c9fd-3364-4328-8717-c25f82fe8d02" (UID: "3f21c9fd-3364-4328-8717-c25f82fe8d02"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.085959 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-config-data" (OuterVolumeSpecName: "config-data") pod "3f21c9fd-3364-4328-8717-c25f82fe8d02" (UID: "3f21c9fd-3364-4328-8717-c25f82fe8d02"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.089164 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f21c9fd-3364-4328-8717-c25f82fe8d02" (UID: "3f21c9fd-3364-4328-8717-c25f82fe8d02"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.104559 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8823197c-d672-4c7b-85b6-eca799fdd37b" path="/var/lib/kubelet/pods/8823197c-d672-4c7b-85b6-eca799fdd37b/volumes" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.150475 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.150512 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.150526 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pm4zz\" (UniqueName: \"kubernetes.io/projected/3f21c9fd-3364-4328-8717-c25f82fe8d02-kube-api-access-pm4zz\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.150542 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f21c9fd-3364-4328-8717-c25f82fe8d02-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.627141 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-4zrq5" event={"ID":"3f21c9fd-3364-4328-8717-c25f82fe8d02","Type":"ContainerDied","Data":"6d41ac3f447e816971a3ad8b1ccc4020fa27433e62c1b01c4e5b8ba327c4ee57"} Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.627191 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d41ac3f447e816971a3ad8b1ccc4020fa27433e62c1b01c4e5b8ba327c4ee57" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.627205 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-4zrq5" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.631693 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75556fe3-ee8c-4829-ae0e-c79cc249615d","Type":"ContainerStarted","Data":"cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036"} Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.631770 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75556fe3-ee8c-4829-ae0e-c79cc249615d","Type":"ContainerStarted","Data":"1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d"} Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.631794 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75556fe3-ee8c-4829-ae0e-c79cc249615d","Type":"ContainerStarted","Data":"dea713ef7f1910c2320df533b639bd7508ed165b171b27b4331fc80f33637914"} Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.711569 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.711545121 podStartE2EDuration="2.711545121s" podCreationTimestamp="2025-11-21 14:26:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:26:44.706018971 +0000 UTC m=+1395.358218270" watchObservedRunningTime="2025-11-21 14:26:44.711545121 +0000 UTC m=+1395.363744390" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.764999 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 14:26:44 crc kubenswrapper[4774]: E1121 14:26:44.765628 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f21c9fd-3364-4328-8717-c25f82fe8d02" containerName="nova-cell1-conductor-db-sync" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.765656 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f21c9fd-3364-4328-8717-c25f82fe8d02" containerName="nova-cell1-conductor-db-sync" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.766005 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f21c9fd-3364-4328-8717-c25f82fe8d02" containerName="nova-cell1-conductor-db-sync" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.767038 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.777372 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.793180 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.868083 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81a92903-9f60-4f44-917f-744a2b80a57c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"81a92903-9f60-4f44-917f-744a2b80a57c\") " pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.868164 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9w9g\" (UniqueName: \"kubernetes.io/projected/81a92903-9f60-4f44-917f-744a2b80a57c-kube-api-access-r9w9g\") pod \"nova-cell1-conductor-0\" (UID: \"81a92903-9f60-4f44-917f-744a2b80a57c\") " pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.868241 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81a92903-9f60-4f44-917f-744a2b80a57c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"81a92903-9f60-4f44-917f-744a2b80a57c\") " pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:44 crc kubenswrapper[4774]: E1121 14:26:44.872025 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bdfda6c612f373c01bcb72e2d01db576c8f2e098d2b01dfac3f8bdf41267689e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 14:26:44 crc kubenswrapper[4774]: E1121 14:26:44.873399 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bdfda6c612f373c01bcb72e2d01db576c8f2e098d2b01dfac3f8bdf41267689e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 14:26:44 crc kubenswrapper[4774]: E1121 14:26:44.874633 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bdfda6c612f373c01bcb72e2d01db576c8f2e098d2b01dfac3f8bdf41267689e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 14:26:44 crc kubenswrapper[4774]: E1121 14:26:44.874670 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="95c540a0-ab8c-4f25-9f17-cc73c4a89cca" containerName="nova-scheduler-scheduler" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.969766 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81a92903-9f60-4f44-917f-744a2b80a57c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"81a92903-9f60-4f44-917f-744a2b80a57c\") " pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:44 crc 
kubenswrapper[4774]: I1121 14:26:44.969875 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9w9g\" (UniqueName: \"kubernetes.io/projected/81a92903-9f60-4f44-917f-744a2b80a57c-kube-api-access-r9w9g\") pod \"nova-cell1-conductor-0\" (UID: \"81a92903-9f60-4f44-917f-744a2b80a57c\") " pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.969970 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81a92903-9f60-4f44-917f-744a2b80a57c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"81a92903-9f60-4f44-917f-744a2b80a57c\") " pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.974037 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81a92903-9f60-4f44-917f-744a2b80a57c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"81a92903-9f60-4f44-917f-744a2b80a57c\") " pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.974383 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81a92903-9f60-4f44-917f-744a2b80a57c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"81a92903-9f60-4f44-917f-744a2b80a57c\") " pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:44 crc kubenswrapper[4774]: I1121 14:26:44.986844 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9w9g\" (UniqueName: \"kubernetes.io/projected/81a92903-9f60-4f44-917f-744a2b80a57c-kube-api-access-r9w9g\") pod \"nova-cell1-conductor-0\" (UID: \"81a92903-9f60-4f44-917f-744a2b80a57c\") " pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:45 crc kubenswrapper[4774]: I1121 14:26:45.098057 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:45 crc kubenswrapper[4774]: I1121 14:26:45.544548 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 14:26:45 crc kubenswrapper[4774]: I1121 14:26:45.645472 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"81a92903-9f60-4f44-917f-744a2b80a57c","Type":"ContainerStarted","Data":"1367553304acf585ee869af4d57fce8ba6a46d4fa49ae9f6d2c93d22109736d3"} Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.658831 4774 generic.go:334] "Generic (PLEG): container finished" podID="95c540a0-ab8c-4f25-9f17-cc73c4a89cca" containerID="bdfda6c612f373c01bcb72e2d01db576c8f2e098d2b01dfac3f8bdf41267689e" exitCode=0 Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.658944 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"95c540a0-ab8c-4f25-9f17-cc73c4a89cca","Type":"ContainerDied","Data":"bdfda6c612f373c01bcb72e2d01db576c8f2e098d2b01dfac3f8bdf41267689e"} Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.659179 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"95c540a0-ab8c-4f25-9f17-cc73c4a89cca","Type":"ContainerDied","Data":"990e3082d4dfb6c81991ae974e44a6223a7972f301e317a8a2c92f775cf959f9"} Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.659198 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="990e3082d4dfb6c81991ae974e44a6223a7972f301e317a8a2c92f775cf959f9" Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.661052 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"81a92903-9f60-4f44-917f-744a2b80a57c","Type":"ContainerStarted","Data":"464ba226111b0f9cd638d2e2cf2340bbb1479d0450894949cd4f99913bbc9678"} Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.661227 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.683722 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.683691834 podStartE2EDuration="2.683691834s" podCreationTimestamp="2025-11-21 14:26:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:26:46.675125466 +0000 UTC m=+1397.327324745" watchObservedRunningTime="2025-11-21 14:26:46.683691834 +0000 UTC m=+1397.335891083" Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.726218 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.815476 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-config-data\") pod \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\" (UID: \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\") " Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.815782 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvhdd\" (UniqueName: \"kubernetes.io/projected/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-kube-api-access-fvhdd\") pod \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\" (UID: \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\") " Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.815846 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-combined-ca-bundle\") pod \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\" (UID: \"95c540a0-ab8c-4f25-9f17-cc73c4a89cca\") " Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.822484 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-kube-api-access-fvhdd" (OuterVolumeSpecName: "kube-api-access-fvhdd") pod "95c540a0-ab8c-4f25-9f17-cc73c4a89cca" (UID: "95c540a0-ab8c-4f25-9f17-cc73c4a89cca"). InnerVolumeSpecName "kube-api-access-fvhdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.848010 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "95c540a0-ab8c-4f25-9f17-cc73c4a89cca" (UID: "95c540a0-ab8c-4f25-9f17-cc73c4a89cca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.848053 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-config-data" (OuterVolumeSpecName: "config-data") pod "95c540a0-ab8c-4f25-9f17-cc73c4a89cca" (UID: "95c540a0-ab8c-4f25-9f17-cc73c4a89cca"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.919015 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvhdd\" (UniqueName: \"kubernetes.io/projected/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-kube-api-access-fvhdd\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.919053 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:46 crc kubenswrapper[4774]: I1121 14:26:46.919064 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95c540a0-ab8c-4f25-9f17-cc73c4a89cca-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.258627 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.259042 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="3e0a71b0-ad47-44f0-9c49-59a1430418b8" containerName="kube-state-metrics" containerID="cri-o://c1a2b84c75c549c10af090d8ea02fb0cb0d9fa9237d914a82d014aeeec7b09df" gracePeriod=30 Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.574190 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.638726 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/659840ff-0364-4f9c-881d-ab33bbc42c09-config-data\") pod \"659840ff-0364-4f9c-881d-ab33bbc42c09\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.638781 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f57lj\" (UniqueName: \"kubernetes.io/projected/659840ff-0364-4f9c-881d-ab33bbc42c09-kube-api-access-f57lj\") pod \"659840ff-0364-4f9c-881d-ab33bbc42c09\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.639042 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/659840ff-0364-4f9c-881d-ab33bbc42c09-logs\") pod \"659840ff-0364-4f9c-881d-ab33bbc42c09\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.639167 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/659840ff-0364-4f9c-881d-ab33bbc42c09-combined-ca-bundle\") pod \"659840ff-0364-4f9c-881d-ab33bbc42c09\" (UID: \"659840ff-0364-4f9c-881d-ab33bbc42c09\") " Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.640368 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/659840ff-0364-4f9c-881d-ab33bbc42c09-logs" (OuterVolumeSpecName: "logs") pod "659840ff-0364-4f9c-881d-ab33bbc42c09" (UID: "659840ff-0364-4f9c-881d-ab33bbc42c09"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.664176 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/659840ff-0364-4f9c-881d-ab33bbc42c09-kube-api-access-f57lj" (OuterVolumeSpecName: "kube-api-access-f57lj") pod "659840ff-0364-4f9c-881d-ab33bbc42c09" (UID: "659840ff-0364-4f9c-881d-ab33bbc42c09"). InnerVolumeSpecName "kube-api-access-f57lj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.701102 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/659840ff-0364-4f9c-881d-ab33bbc42c09-config-data" (OuterVolumeSpecName: "config-data") pod "659840ff-0364-4f9c-881d-ab33bbc42c09" (UID: "659840ff-0364-4f9c-881d-ab33bbc42c09"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.711029 4774 generic.go:334] "Generic (PLEG): container finished" podID="3e0a71b0-ad47-44f0-9c49-59a1430418b8" containerID="c1a2b84c75c549c10af090d8ea02fb0cb0d9fa9237d914a82d014aeeec7b09df" exitCode=2 Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.711273 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3e0a71b0-ad47-44f0-9c49-59a1430418b8","Type":"ContainerDied","Data":"c1a2b84c75c549c10af090d8ea02fb0cb0d9fa9237d914a82d014aeeec7b09df"} Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.715054 4774 generic.go:334] "Generic (PLEG): container finished" podID="659840ff-0364-4f9c-881d-ab33bbc42c09" containerID="4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451" exitCode=0 Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.715274 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"659840ff-0364-4f9c-881d-ab33bbc42c09","Type":"ContainerDied","Data":"4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451"} Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.715362 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"659840ff-0364-4f9c-881d-ab33bbc42c09","Type":"ContainerDied","Data":"eac174a471310e8649f9463816bafeafac1ef2606a4e9fc1af710b7bfd501e02"} Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.715402 4774 scope.go:117] "RemoveContainer" containerID="4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.715312 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.721982 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/659840ff-0364-4f9c-881d-ab33bbc42c09-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "659840ff-0364-4f9c-881d-ab33bbc42c09" (UID: "659840ff-0364-4f9c-881d-ab33bbc42c09"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.723549 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.742500 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/659840ff-0364-4f9c-881d-ab33bbc42c09-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.742588 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/659840ff-0364-4f9c-881d-ab33bbc42c09-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.742605 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/659840ff-0364-4f9c-881d-ab33bbc42c09-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.742618 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f57lj\" (UniqueName: \"kubernetes.io/projected/659840ff-0364-4f9c-881d-ab33bbc42c09-kube-api-access-f57lj\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.794139 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.825501 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.831094 4774 scope.go:117] "RemoveContainer" containerID="c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.835979 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.846145 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 21 14:26:47 crc kubenswrapper[4774]: E1121 14:26:47.846791 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="659840ff-0364-4f9c-881d-ab33bbc42c09" containerName="nova-api-api" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.846810 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="659840ff-0364-4f9c-881d-ab33bbc42c09" containerName="nova-api-api" Nov 21 14:26:47 crc kubenswrapper[4774]: E1121 14:26:47.846868 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="659840ff-0364-4f9c-881d-ab33bbc42c09" containerName="nova-api-log" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.846878 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="659840ff-0364-4f9c-881d-ab33bbc42c09" containerName="nova-api-log" Nov 21 14:26:47 crc kubenswrapper[4774]: E1121 14:26:47.846918 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e0a71b0-ad47-44f0-9c49-59a1430418b8" containerName="kube-state-metrics" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.846927 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e0a71b0-ad47-44f0-9c49-59a1430418b8" containerName="kube-state-metrics" Nov 21 14:26:47 crc kubenswrapper[4774]: E1121 14:26:47.846975 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95c540a0-ab8c-4f25-9f17-cc73c4a89cca" containerName="nova-scheduler-scheduler" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.846988 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="95c540a0-ab8c-4f25-9f17-cc73c4a89cca" containerName="nova-scheduler-scheduler" Nov 21 14:26:47 crc kubenswrapper[4774]: 
I1121 14:26:47.847251 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="659840ff-0364-4f9c-881d-ab33bbc42c09" containerName="nova-api-api" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.847266 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="95c540a0-ab8c-4f25-9f17-cc73c4a89cca" containerName="nova-scheduler-scheduler" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.847276 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="659840ff-0364-4f9c-881d-ab33bbc42c09" containerName="nova-api-log" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.847289 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e0a71b0-ad47-44f0-9c49-59a1430418b8" containerName="kube-state-metrics" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.848614 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.861517 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.869014 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.871755 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.876343 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.883616 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.897783 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.898955 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.900254 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.902783 4774 scope.go:117] "RemoveContainer" containerID="4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451" Nov 21 14:26:47 crc kubenswrapper[4774]: E1121 14:26:47.903289 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451\": container with ID starting with 4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451 not found: ID does not exist" containerID="4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.903338 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451"} err="failed to get container status \"4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451\": rpc error: code = NotFound desc = could not find container \"4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451\": container with ID starting with 4bf2c4d27c9e4ade25aa9b4a9c7e83686c87a258158af5de50c6b50cd1486451 not found: ID does not exist" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.903366 4774 scope.go:117] "RemoveContainer" containerID="c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee" Nov 21 14:26:47 crc kubenswrapper[4774]: E1121 14:26:47.903678 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee\": container with ID starting with c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee not found: ID does not exist" containerID="c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.903740 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee"} err="failed to get container status \"c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee\": rpc error: code = NotFound desc = could not find container \"c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee\": container with ID starting with c30e1a05e38fce9b1e27c141e2b16ada1cf5e89f21c9c64b4b2808c22527c2ee not found: ID does not exist" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.946780 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8kpv\" (UniqueName: \"kubernetes.io/projected/3e0a71b0-ad47-44f0-9c49-59a1430418b8-kube-api-access-c8kpv\") pod \"3e0a71b0-ad47-44f0-9c49-59a1430418b8\" (UID: \"3e0a71b0-ad47-44f0-9c49-59a1430418b8\") " Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.947935 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72e3e847-c565-467a-9f43-64641e919888-logs\") pod \"nova-api-0\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " 
pod="openstack/nova-api-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.947979 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.948081 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-config-data\") pod \"nova-scheduler-0\" (UID: \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.948170 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72e3e847-c565-467a-9f43-64641e919888-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " pod="openstack/nova-api-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.948217 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snpjj\" (UniqueName: \"kubernetes.io/projected/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-kube-api-access-snpjj\") pod \"nova-scheduler-0\" (UID: \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.948272 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72e3e847-c565-467a-9f43-64641e919888-config-data\") pod \"nova-api-0\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " pod="openstack/nova-api-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.948332 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5d72q\" (UniqueName: \"kubernetes.io/projected/72e3e847-c565-467a-9f43-64641e919888-kube-api-access-5d72q\") pod \"nova-api-0\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " pod="openstack/nova-api-0" Nov 21 14:26:47 crc kubenswrapper[4774]: I1121 14:26:47.950973 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e0a71b0-ad47-44f0-9c49-59a1430418b8-kube-api-access-c8kpv" (OuterVolumeSpecName: "kube-api-access-c8kpv") pod "3e0a71b0-ad47-44f0-9c49-59a1430418b8" (UID: "3e0a71b0-ad47-44f0-9c49-59a1430418b8"). InnerVolumeSpecName "kube-api-access-c8kpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.049811 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-config-data\") pod \"nova-scheduler-0\" (UID: \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.050136 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72e3e847-c565-467a-9f43-64641e919888-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " pod="openstack/nova-api-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.050173 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snpjj\" (UniqueName: \"kubernetes.io/projected/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-kube-api-access-snpjj\") pod \"nova-scheduler-0\" (UID: \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.050213 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72e3e847-c565-467a-9f43-64641e919888-config-data\") pod \"nova-api-0\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " pod="openstack/nova-api-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.050255 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5d72q\" (UniqueName: \"kubernetes.io/projected/72e3e847-c565-467a-9f43-64641e919888-kube-api-access-5d72q\") pod \"nova-api-0\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " pod="openstack/nova-api-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.050303 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72e3e847-c565-467a-9f43-64641e919888-logs\") pod \"nova-api-0\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " pod="openstack/nova-api-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.050323 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.050387 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8kpv\" (UniqueName: \"kubernetes.io/projected/3e0a71b0-ad47-44f0-9c49-59a1430418b8-kube-api-access-c8kpv\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.051183 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72e3e847-c565-467a-9f43-64641e919888-logs\") pod \"nova-api-0\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " pod="openstack/nova-api-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.054703 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:48 crc 
kubenswrapper[4774]: I1121 14:26:48.055861 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72e3e847-c565-467a-9f43-64641e919888-config-data\") pod \"nova-api-0\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " pod="openstack/nova-api-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.055944 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-config-data\") pod \"nova-scheduler-0\" (UID: \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.057646 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72e3e847-c565-467a-9f43-64641e919888-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " pod="openstack/nova-api-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.066924 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snpjj\" (UniqueName: \"kubernetes.io/projected/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-kube-api-access-snpjj\") pod \"nova-scheduler-0\" (UID: \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\") " pod="openstack/nova-scheduler-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.068797 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5d72q\" (UniqueName: \"kubernetes.io/projected/72e3e847-c565-467a-9f43-64641e919888-kube-api-access-5d72q\") pod \"nova-api-0\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " pod="openstack/nova-api-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.113592 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="659840ff-0364-4f9c-881d-ab33bbc42c09" path="/var/lib/kubelet/pods/659840ff-0364-4f9c-881d-ab33bbc42c09/volumes" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.114470 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95c540a0-ab8c-4f25-9f17-cc73c4a89cca" path="/var/lib/kubelet/pods/95c540a0-ab8c-4f25-9f17-cc73c4a89cca/volumes" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.192461 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.232500 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.270069 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.271006 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 14:26:48 crc kubenswrapper[4774]: W1121 14:26:48.725845 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72e3e847_c565_467a_9f43_64641e919888.slice/crio-cb07a9243eac870fbb52d522a6e12141e7d3d1b5c1276daf4792cd193a6f192d WatchSource:0}: Error finding container cb07a9243eac870fbb52d522a6e12141e7d3d1b5c1276daf4792cd193a6f192d: Status 404 returned error can't find the container with id cb07a9243eac870fbb52d522a6e12141e7d3d1b5c1276daf4792cd193a6f192d Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.726213 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.731722 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"3e0a71b0-ad47-44f0-9c49-59a1430418b8","Type":"ContainerDied","Data":"450725972d1beec310bb5828f66c7775a4419d4033d9b6e79e1b97145960f1fe"} Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.731801 4774 scope.go:117] "RemoveContainer" containerID="c1a2b84c75c549c10af090d8ea02fb0cb0d9fa9237d914a82d014aeeec7b09df" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.732143 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.801224 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.818919 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.830680 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.843042 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.845392 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.851484 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.851555 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.858233 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.975652 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") " pod="openstack/kube-state-metrics-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.975777 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tx4w9\" (UniqueName: \"kubernetes.io/projected/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-api-access-tx4w9\") pod \"kube-state-metrics-0\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") " pod="openstack/kube-state-metrics-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.975899 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") " pod="openstack/kube-state-metrics-0" Nov 21 14:26:48 crc kubenswrapper[4774]: I1121 14:26:48.975964 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") " pod="openstack/kube-state-metrics-0" Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.077811 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tx4w9\" (UniqueName: \"kubernetes.io/projected/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-api-access-tx4w9\") pod \"kube-state-metrics-0\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") " pod="openstack/kube-state-metrics-0" Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.078246 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") " pod="openstack/kube-state-metrics-0" Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.078335 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") " pod="openstack/kube-state-metrics-0" Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.078424 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") " pod="openstack/kube-state-metrics-0" Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.083580 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") " pod="openstack/kube-state-metrics-0" Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.084427 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") " pod="openstack/kube-state-metrics-0" Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.085162 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") " pod="openstack/kube-state-metrics-0" Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.103885 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tx4w9\" (UniqueName: \"kubernetes.io/projected/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-api-access-tx4w9\") pod \"kube-state-metrics-0\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") " pod="openstack/kube-state-metrics-0" Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.349081 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.349429 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="ceilometer-central-agent" containerID="cri-o://cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae" gracePeriod=30 Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.349550 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="sg-core" containerID="cri-o://3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06" gracePeriod=30 Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.349570 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="ceilometer-notification-agent" containerID="cri-o://d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e" gracePeriod=30 Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.349633 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="proxy-httpd" containerID="cri-o://dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6" gracePeriod=30 Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.388302 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.746416 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780","Type":"ContainerStarted","Data":"8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6"} Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.746702 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780","Type":"ContainerStarted","Data":"7de3ed65ef77133d71e392e679129a092ff168ebd2a17b78af49a6e80f021211"} Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.752391 4774 generic.go:334] "Generic (PLEG): container finished" podID="9a4ee847-f087-4d23-a078-96245826a60c" containerID="dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6" exitCode=0 Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.752429 4774 generic.go:334] "Generic (PLEG): container finished" podID="9a4ee847-f087-4d23-a078-96245826a60c" containerID="3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06" exitCode=2 Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.752462 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9a4ee847-f087-4d23-a078-96245826a60c","Type":"ContainerDied","Data":"dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6"} Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.752524 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9a4ee847-f087-4d23-a078-96245826a60c","Type":"ContainerDied","Data":"3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06"} Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.754931 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"72e3e847-c565-467a-9f43-64641e919888","Type":"ContainerStarted","Data":"1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8"} Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.754978 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"72e3e847-c565-467a-9f43-64641e919888","Type":"ContainerStarted","Data":"c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a"} Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.754993 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"72e3e847-c565-467a-9f43-64641e919888","Type":"ContainerStarted","Data":"cb07a9243eac870fbb52d522a6e12141e7d3d1b5c1276daf4792cd193a6f192d"} Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.772724 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.77270139 podStartE2EDuration="2.77270139s" podCreationTimestamp="2025-11-21 14:26:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:26:49.765302546 +0000 UTC m=+1400.417501815" watchObservedRunningTime="2025-11-21 14:26:49.77270139 +0000 UTC m=+1400.424900649" Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.806476 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.806440896 podStartE2EDuration="2.806440896s" podCreationTimestamp="2025-11-21 14:26:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:26:49.789316061 +0000 UTC m=+1400.441515320" watchObservedRunningTime="2025-11-21 14:26:49.806440896 +0000 UTC m=+1400.458640155" Nov 21 14:26:49 crc kubenswrapper[4774]: W1121 14:26:49.867257 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod424dddc1_7019_40ab_b405_a2dcaee08c65.slice/crio-eca47116e2e11cf007e39b5eb2ad4e3e760120b62040ea790aedf25cadf061e1 WatchSource:0}: Error finding container eca47116e2e11cf007e39b5eb2ad4e3e760120b62040ea790aedf25cadf061e1: Status 404 returned error can't find the container with id eca47116e2e11cf007e39b5eb2ad4e3e760120b62040ea790aedf25cadf061e1 Nov 21 14:26:49 crc kubenswrapper[4774]: I1121 14:26:49.870114 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 21 14:26:50 crc kubenswrapper[4774]: I1121 14:26:50.106692 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e0a71b0-ad47-44f0-9c49-59a1430418b8" path="/var/lib/kubelet/pods/3e0a71b0-ad47-44f0-9c49-59a1430418b8/volumes" Nov 21 14:26:50 crc kubenswrapper[4774]: I1121 14:26:50.157723 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 21 14:26:50 crc kubenswrapper[4774]: I1121 14:26:50.773932 4774 generic.go:334] "Generic (PLEG): container finished" podID="9a4ee847-f087-4d23-a078-96245826a60c" containerID="cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae" exitCode=0 Nov 21 14:26:50 crc kubenswrapper[4774]: I1121 14:26:50.774322 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9a4ee847-f087-4d23-a078-96245826a60c","Type":"ContainerDied","Data":"cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae"} Nov 21 14:26:50 crc kubenswrapper[4774]: I1121 14:26:50.779045 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"424dddc1-7019-40ab-b405-a2dcaee08c65","Type":"ContainerStarted","Data":"f7e24f67518e454751426c3c5dc72df1fbe276fbaaac5b326b29c0ee877432a8"} Nov 21 14:26:50 crc kubenswrapper[4774]: I1121 14:26:50.779227 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"424dddc1-7019-40ab-b405-a2dcaee08c65","Type":"ContainerStarted","Data":"eca47116e2e11cf007e39b5eb2ad4e3e760120b62040ea790aedf25cadf061e1"} Nov 21 14:26:50 crc kubenswrapper[4774]: I1121 14:26:50.780155 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Nov 21 14:26:50 crc kubenswrapper[4774]: I1121 14:26:50.810716 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.426555486 podStartE2EDuration="2.810686543s" podCreationTimestamp="2025-11-21 14:26:48 +0000 UTC" firstStartedPulling="2025-11-21 14:26:49.870076636 +0000 UTC m=+1400.522275885" lastFinishedPulling="2025-11-21 14:26:50.254207683 +0000 UTC m=+1400.906406942" observedRunningTime="2025-11-21 14:26:50.799238292 +0000 UTC m=+1401.451437551" watchObservedRunningTime="2025-11-21 14:26:50.810686543 +0000 UTC m=+1401.462885802" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.521808 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.655473 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-scripts\") pod \"9a4ee847-f087-4d23-a078-96245826a60c\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.655544 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-sg-core-conf-yaml\") pod \"9a4ee847-f087-4d23-a078-96245826a60c\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.655630 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8hqq\" (UniqueName: \"kubernetes.io/projected/9a4ee847-f087-4d23-a078-96245826a60c-kube-api-access-f8hqq\") pod \"9a4ee847-f087-4d23-a078-96245826a60c\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.655676 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9a4ee847-f087-4d23-a078-96245826a60c-log-httpd\") pod \"9a4ee847-f087-4d23-a078-96245826a60c\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.655720 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-config-data\") pod \"9a4ee847-f087-4d23-a078-96245826a60c\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.655748 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9a4ee847-f087-4d23-a078-96245826a60c-run-httpd\") pod \"9a4ee847-f087-4d23-a078-96245826a60c\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.655869 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-combined-ca-bundle\") pod \"9a4ee847-f087-4d23-a078-96245826a60c\" (UID: \"9a4ee847-f087-4d23-a078-96245826a60c\") " Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.656596 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a4ee847-f087-4d23-a078-96245826a60c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9a4ee847-f087-4d23-a078-96245826a60c" (UID: "9a4ee847-f087-4d23-a078-96245826a60c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.656956 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a4ee847-f087-4d23-a078-96245826a60c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9a4ee847-f087-4d23-a078-96245826a60c" (UID: "9a4ee847-f087-4d23-a078-96245826a60c"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.663189 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-scripts" (OuterVolumeSpecName: "scripts") pod "9a4ee847-f087-4d23-a078-96245826a60c" (UID: "9a4ee847-f087-4d23-a078-96245826a60c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.663944 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a4ee847-f087-4d23-a078-96245826a60c-kube-api-access-f8hqq" (OuterVolumeSpecName: "kube-api-access-f8hqq") pod "9a4ee847-f087-4d23-a078-96245826a60c" (UID: "9a4ee847-f087-4d23-a078-96245826a60c"). InnerVolumeSpecName "kube-api-access-f8hqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.713042 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9a4ee847-f087-4d23-a078-96245826a60c" (UID: "9a4ee847-f087-4d23-a078-96245826a60c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.759510 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.759562 4774 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.759577 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8hqq\" (UniqueName: \"kubernetes.io/projected/9a4ee847-f087-4d23-a078-96245826a60c-kube-api-access-f8hqq\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.759589 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9a4ee847-f087-4d23-a078-96245826a60c-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.759601 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9a4ee847-f087-4d23-a078-96245826a60c-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.761430 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a4ee847-f087-4d23-a078-96245826a60c" (UID: "9a4ee847-f087-4d23-a078-96245826a60c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.801282 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-config-data" (OuterVolumeSpecName: "config-data") pod "9a4ee847-f087-4d23-a078-96245826a60c" (UID: "9a4ee847-f087-4d23-a078-96245826a60c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.802886 4774 generic.go:334] "Generic (PLEG): container finished" podID="9a4ee847-f087-4d23-a078-96245826a60c" containerID="d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e" exitCode=0 Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.802953 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9a4ee847-f087-4d23-a078-96245826a60c","Type":"ContainerDied","Data":"d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e"} Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.803001 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9a4ee847-f087-4d23-a078-96245826a60c","Type":"ContainerDied","Data":"abcd43c9b157b0420d30f51972a838b3fa7f1dee61c60cc2d23031d1c94724f2"} Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.803022 4774 scope.go:117] "RemoveContainer" containerID="dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.803024 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.833676 4774 scope.go:117] "RemoveContainer" containerID="3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.872513 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.872568 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a4ee847-f087-4d23-a078-96245826a60c-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.872733 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.892019 4774 scope.go:117] "RemoveContainer" containerID="d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.897886 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.929717 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:26:52 crc kubenswrapper[4774]: E1121 14:26:52.930655 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="ceilometer-notification-agent" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.930679 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="ceilometer-notification-agent" Nov 21 14:26:52 crc kubenswrapper[4774]: E1121 14:26:52.930695 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="sg-core" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.930702 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="sg-core" Nov 21 14:26:52 crc kubenswrapper[4774]: E1121 14:26:52.930715 4774 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="ceilometer-central-agent" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.930721 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="ceilometer-central-agent" Nov 21 14:26:52 crc kubenswrapper[4774]: E1121 14:26:52.930759 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="proxy-httpd" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.930766 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="proxy-httpd" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.930968 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="ceilometer-central-agent" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.930997 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="ceilometer-notification-agent" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.931008 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="sg-core" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.931017 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a4ee847-f087-4d23-a078-96245826a60c" containerName="proxy-httpd" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.933158 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.936035 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.936070 4774 scope.go:117] "RemoveContainer" containerID="cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.936541 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.936943 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.952596 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.974651 4774 scope.go:117] "RemoveContainer" containerID="dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6" Nov 21 14:26:52 crc kubenswrapper[4774]: E1121 14:26:52.975336 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6\": container with ID starting with dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6 not found: ID does not exist" containerID="dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.975401 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6"} err="failed to get container status \"dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6\": rpc error: code = NotFound desc = could not find container 
\"dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6\": container with ID starting with dccfa00458215f48b32df3261b1a299d38f5e53b78623190d74eb83b3b7c31a6 not found: ID does not exist" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.975437 4774 scope.go:117] "RemoveContainer" containerID="3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06" Nov 21 14:26:52 crc kubenswrapper[4774]: E1121 14:26:52.975898 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06\": container with ID starting with 3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06 not found: ID does not exist" containerID="3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.975933 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06"} err="failed to get container status \"3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06\": rpc error: code = NotFound desc = could not find container \"3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06\": container with ID starting with 3ef4513e62e8dd856036b5dcaec5a26c02e517e3135fe72acab294ba29109f06 not found: ID does not exist" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.975962 4774 scope.go:117] "RemoveContainer" containerID="d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e" Nov 21 14:26:52 crc kubenswrapper[4774]: E1121 14:26:52.976428 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e\": container with ID starting with d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e not found: ID does not exist" containerID="d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.976464 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e"} err="failed to get container status \"d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e\": rpc error: code = NotFound desc = could not find container \"d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e\": container with ID starting with d7db9a5a04b746a6115a9d636b74271c72bbc0ba493efde42815ff5a0c01783e not found: ID does not exist" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.976484 4774 scope.go:117] "RemoveContainer" containerID="cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae" Nov 21 14:26:52 crc kubenswrapper[4774]: E1121 14:26:52.976857 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae\": container with ID starting with cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae not found: ID does not exist" containerID="cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae" Nov 21 14:26:52 crc kubenswrapper[4774]: I1121 14:26:52.976914 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae"} 
err="failed to get container status \"cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae\": rpc error: code = NotFound desc = could not find container \"cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae\": container with ID starting with cfa64402f10685ddb9671088e02396f471cee21f579dba5584d9a8140a3b24ae not found: ID does not exist" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.076600 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxt5v\" (UniqueName: \"kubernetes.io/projected/e0d6fcef-c135-49fd-b80d-274d4ed2193a-kube-api-access-jxt5v\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.077042 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0d6fcef-c135-49fd-b80d-274d4ed2193a-run-httpd\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.077122 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.077336 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-scripts\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.077442 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-config-data\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.077469 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.077554 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0d6fcef-c135-49fd-b80d-274d4ed2193a-log-httpd\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.077641 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.179703 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.179844 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-scripts\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.179874 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.179893 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-config-data\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.179933 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0d6fcef-c135-49fd-b80d-274d4ed2193a-log-httpd\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.179954 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.179997 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxt5v\" (UniqueName: \"kubernetes.io/projected/e0d6fcef-c135-49fd-b80d-274d4ed2193a-kube-api-access-jxt5v\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.180036 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0d6fcef-c135-49fd-b80d-274d4ed2193a-run-httpd\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.181307 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0d6fcef-c135-49fd-b80d-274d4ed2193a-log-httpd\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.181506 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0d6fcef-c135-49fd-b80d-274d4ed2193a-run-httpd\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.185587 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.186606 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.186900 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-config-data\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.187232 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-scripts\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.187521 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.207323 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxt5v\" (UniqueName: \"kubernetes.io/projected/e0d6fcef-c135-49fd-b80d-274d4ed2193a-kube-api-access-jxt5v\") pod \"ceilometer-0\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.233513 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.256216 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.270060 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.270115 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.722752 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:26:53 crc kubenswrapper[4774]: I1121 14:26:53.813801 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0d6fcef-c135-49fd-b80d-274d4ed2193a","Type":"ContainerStarted","Data":"4c6737e75eb6b61aac9d25c260dde155122ba35b80e285f4c3c223a3a45617d0"} Nov 21 14:26:54 crc kubenswrapper[4774]: I1121 14:26:54.108843 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a4ee847-f087-4d23-a078-96245826a60c" path="/var/lib/kubelet/pods/9a4ee847-f087-4d23-a078-96245826a60c/volumes" Nov 21 14:26:54 crc kubenswrapper[4774]: I1121 14:26:54.286045 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 14:26:54 crc kubenswrapper[4774]: I1121 14:26:54.286012 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 14:26:54 crc kubenswrapper[4774]: I1121 14:26:54.828220 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0d6fcef-c135-49fd-b80d-274d4ed2193a","Type":"ContainerStarted","Data":"3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f"} Nov 21 14:26:55 crc kubenswrapper[4774]: I1121 14:26:55.846134 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0d6fcef-c135-49fd-b80d-274d4ed2193a","Type":"ContainerStarted","Data":"4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490"} Nov 21 14:26:56 crc kubenswrapper[4774]: I1121 14:26:56.862978 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0d6fcef-c135-49fd-b80d-274d4ed2193a","Type":"ContainerStarted","Data":"5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630"} Nov 21 14:26:58 crc kubenswrapper[4774]: I1121 14:26:58.193573 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 14:26:58 crc kubenswrapper[4774]: I1121 14:26:58.194206 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 14:26:58 crc kubenswrapper[4774]: I1121 14:26:58.233299 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 21 14:26:58 crc kubenswrapper[4774]: I1121 14:26:58.276053 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 21 14:26:58 crc kubenswrapper[4774]: I1121 14:26:58.889726 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"e0d6fcef-c135-49fd-b80d-274d4ed2193a","Type":"ContainerStarted","Data":"3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234"} Nov 21 14:26:58 crc kubenswrapper[4774]: I1121 14:26:58.921528 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.943039309 podStartE2EDuration="6.921491463s" podCreationTimestamp="2025-11-21 14:26:52 +0000 UTC" firstStartedPulling="2025-11-21 14:26:53.74509526 +0000 UTC m=+1404.397294519" lastFinishedPulling="2025-11-21 14:26:57.723547414 +0000 UTC m=+1408.375746673" observedRunningTime="2025-11-21 14:26:58.916503598 +0000 UTC m=+1409.568702857" watchObservedRunningTime="2025-11-21 14:26:58.921491463 +0000 UTC m=+1409.573690732" Nov 21 14:26:58 crc kubenswrapper[4774]: I1121 14:26:58.921937 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 21 14:26:59 crc kubenswrapper[4774]: I1121 14:26:59.278140 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="72e3e847-c565-467a-9f43-64641e919888" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 14:26:59 crc kubenswrapper[4774]: I1121 14:26:59.278184 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="72e3e847-c565-467a-9f43-64641e919888" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 14:26:59 crc kubenswrapper[4774]: I1121 14:26:59.401657 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Nov 21 14:26:59 crc kubenswrapper[4774]: I1121 14:26:59.900414 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 21 14:27:03 crc kubenswrapper[4774]: I1121 14:27:03.275671 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 21 14:27:03 crc kubenswrapper[4774]: I1121 14:27:03.276540 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 21 14:27:03 crc kubenswrapper[4774]: I1121 14:27:03.290285 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 21 14:27:03 crc kubenswrapper[4774]: I1121 14:27:03.292553 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 21 14:27:05 crc kubenswrapper[4774]: I1121 14:27:05.851274 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:05 crc kubenswrapper[4774]: I1121 14:27:05.981918 4774 generic.go:334] "Generic (PLEG): container finished" podID="b46021eb-b966-48c2-a988-c050546ce0d6" containerID="492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704" exitCode=137 Nov 21 14:27:05 crc kubenswrapper[4774]: I1121 14:27:05.981963 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b46021eb-b966-48c2-a988-c050546ce0d6","Type":"ContainerDied","Data":"492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704"} Nov 21 14:27:05 crc kubenswrapper[4774]: I1121 14:27:05.981993 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"b46021eb-b966-48c2-a988-c050546ce0d6","Type":"ContainerDied","Data":"b8991e8a11d96c002effbb4c5f7ac294701d818839e237ac0e31e5c34e2a6968"} Nov 21 14:27:05 crc kubenswrapper[4774]: I1121 14:27:05.982012 4774 scope.go:117] "RemoveContainer" containerID="492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704" Nov 21 14:27:05 crc kubenswrapper[4774]: I1121 14:27:05.982047 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.011250 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szp97\" (UniqueName: \"kubernetes.io/projected/b46021eb-b966-48c2-a988-c050546ce0d6-kube-api-access-szp97\") pod \"b46021eb-b966-48c2-a988-c050546ce0d6\" (UID: \"b46021eb-b966-48c2-a988-c050546ce0d6\") " Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.011449 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b46021eb-b966-48c2-a988-c050546ce0d6-combined-ca-bundle\") pod \"b46021eb-b966-48c2-a988-c050546ce0d6\" (UID: \"b46021eb-b966-48c2-a988-c050546ce0d6\") " Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.011630 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b46021eb-b966-48c2-a988-c050546ce0d6-config-data\") pod \"b46021eb-b966-48c2-a988-c050546ce0d6\" (UID: \"b46021eb-b966-48c2-a988-c050546ce0d6\") " Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.012872 4774 scope.go:117] "RemoveContainer" containerID="492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704" Nov 21 14:27:06 crc kubenswrapper[4774]: E1121 14:27:06.013603 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704\": container with ID starting with 492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704 not found: ID does not exist" containerID="492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.013695 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704"} err="failed to get container status \"492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704\": rpc error: code = NotFound desc = could not find container \"492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704\": container with ID starting with 
492f53da35208f11b7c72ef2510229f9cf2a01ee14405a0cf09860dbe4974704 not found: ID does not exist" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.027276 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b46021eb-b966-48c2-a988-c050546ce0d6-kube-api-access-szp97" (OuterVolumeSpecName: "kube-api-access-szp97") pod "b46021eb-b966-48c2-a988-c050546ce0d6" (UID: "b46021eb-b966-48c2-a988-c050546ce0d6"). InnerVolumeSpecName "kube-api-access-szp97". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.048374 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b46021eb-b966-48c2-a988-c050546ce0d6-config-data" (OuterVolumeSpecName: "config-data") pod "b46021eb-b966-48c2-a988-c050546ce0d6" (UID: "b46021eb-b966-48c2-a988-c050546ce0d6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.059902 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b46021eb-b966-48c2-a988-c050546ce0d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b46021eb-b966-48c2-a988-c050546ce0d6" (UID: "b46021eb-b966-48c2-a988-c050546ce0d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.113976 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b46021eb-b966-48c2-a988-c050546ce0d6-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.114329 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szp97\" (UniqueName: \"kubernetes.io/projected/b46021eb-b966-48c2-a988-c050546ce0d6-kube-api-access-szp97\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.114449 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b46021eb-b966-48c2-a988-c050546ce0d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.316879 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.330627 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.341649 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 14:27:06 crc kubenswrapper[4774]: E1121 14:27:06.342446 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b46021eb-b966-48c2-a988-c050546ce0d6" containerName="nova-cell1-novncproxy-novncproxy" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.342467 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b46021eb-b966-48c2-a988-c050546ce0d6" containerName="nova-cell1-novncproxy-novncproxy" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.342743 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b46021eb-b966-48c2-a988-c050546ce0d6" containerName="nova-cell1-novncproxy-novncproxy" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.343744 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.346955 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.358247 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.359056 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.360978 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.537401 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.537468 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4kfn\" (UniqueName: \"kubernetes.io/projected/e0f5811f-60f6-4820-b981-715448365e52-kube-api-access-x4kfn\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.537527 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.537560 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.537651 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.640007 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.640100 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " 
pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.640131 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4kfn\" (UniqueName: \"kubernetes.io/projected/e0f5811f-60f6-4820-b981-715448365e52-kube-api-access-x4kfn\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.640177 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.640211 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.644888 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.644982 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.649117 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.650124 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.663303 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4kfn\" (UniqueName: \"kubernetes.io/projected/e0f5811f-60f6-4820-b981-715448365e52-kube-api-access-x4kfn\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:06 crc kubenswrapper[4774]: I1121 14:27:06.677204 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:07 crc kubenswrapper[4774]: I1121 14:27:07.190552 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 14:27:08 crc kubenswrapper[4774]: I1121 14:27:08.006591 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e0f5811f-60f6-4820-b981-715448365e52","Type":"ContainerStarted","Data":"36dd18acf6da72cf687d626808b4dda1668438a188a9f1018c121f0a0c64d299"} Nov 21 14:27:08 crc kubenswrapper[4774]: I1121 14:27:08.006985 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e0f5811f-60f6-4820-b981-715448365e52","Type":"ContainerStarted","Data":"a5b8ada10b9a77eb9454b6acc14455cdf1878ee9df8d3f23a9b3ecc30a817659"} Nov 21 14:27:08 crc kubenswrapper[4774]: I1121 14:27:08.039900 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.039877904 podStartE2EDuration="2.039877904s" podCreationTimestamp="2025-11-21 14:27:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:27:08.031366488 +0000 UTC m=+1418.683565747" watchObservedRunningTime="2025-11-21 14:27:08.039877904 +0000 UTC m=+1418.692077163" Nov 21 14:27:08 crc kubenswrapper[4774]: I1121 14:27:08.104328 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b46021eb-b966-48c2-a988-c050546ce0d6" path="/var/lib/kubelet/pods/b46021eb-b966-48c2-a988-c050546ce0d6/volumes" Nov 21 14:27:08 crc kubenswrapper[4774]: I1121 14:27:08.197812 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 21 14:27:08 crc kubenswrapper[4774]: I1121 14:27:08.199937 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 21 14:27:08 crc kubenswrapper[4774]: I1121 14:27:08.200480 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 21 14:27:08 crc kubenswrapper[4774]: I1121 14:27:08.202014 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.014780 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.018236 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.212107 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6868d89965-nrgpl"] Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.214385 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.243350 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6868d89965-nrgpl"] Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.403563 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-config\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.403627 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-ovsdbserver-sb\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.403686 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-dns-svc\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.403724 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zblm\" (UniqueName: \"kubernetes.io/projected/f89a7785-0a49-4c28-a587-ec113d2f3635-kube-api-access-5zblm\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.403799 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-ovsdbserver-nb\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.403856 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-dns-swift-storage-0\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.505779 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-ovsdbserver-nb\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.505909 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-dns-swift-storage-0\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.505990 4774 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-config\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.506029 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-ovsdbserver-sb\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.506069 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-dns-svc\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.506113 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zblm\" (UniqueName: \"kubernetes.io/projected/f89a7785-0a49-4c28-a587-ec113d2f3635-kube-api-access-5zblm\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.507101 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-dns-swift-storage-0\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.507142 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-ovsdbserver-sb\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.507247 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-config\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.507270 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-dns-svc\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.508205 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-ovsdbserver-nb\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.529098 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zblm\" (UniqueName: 
\"kubernetes.io/projected/f89a7785-0a49-4c28-a587-ec113d2f3635-kube-api-access-5zblm\") pod \"dnsmasq-dns-6868d89965-nrgpl\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.541696 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:09 crc kubenswrapper[4774]: I1121 14:27:09.872036 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6868d89965-nrgpl"] Nov 21 14:27:09 crc kubenswrapper[4774]: W1121 14:27:09.873540 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf89a7785_0a49_4c28_a587_ec113d2f3635.slice/crio-9039f1b79fd3bb9f3fd9c2cd2365af0009ab0f999d41748d7258c8e87371def6 WatchSource:0}: Error finding container 9039f1b79fd3bb9f3fd9c2cd2365af0009ab0f999d41748d7258c8e87371def6: Status 404 returned error can't find the container with id 9039f1b79fd3bb9f3fd9c2cd2365af0009ab0f999d41748d7258c8e87371def6 Nov 21 14:27:10 crc kubenswrapper[4774]: I1121 14:27:10.032633 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" event={"ID":"f89a7785-0a49-4c28-a587-ec113d2f3635","Type":"ContainerStarted","Data":"9039f1b79fd3bb9f3fd9c2cd2365af0009ab0f999d41748d7258c8e87371def6"} Nov 21 14:27:11 crc kubenswrapper[4774]: I1121 14:27:11.043467 4774 generic.go:334] "Generic (PLEG): container finished" podID="f89a7785-0a49-4c28-a587-ec113d2f3635" containerID="146696e90c13f79100e0e356f29802f45ab9194b43414372cba67548f83118f8" exitCode=0 Nov 21 14:27:11 crc kubenswrapper[4774]: I1121 14:27:11.043664 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" event={"ID":"f89a7785-0a49-4c28-a587-ec113d2f3635","Type":"ContainerDied","Data":"146696e90c13f79100e0e356f29802f45ab9194b43414372cba67548f83118f8"} Nov 21 14:27:11 crc kubenswrapper[4774]: I1121 14:27:11.374675 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:27:11 crc kubenswrapper[4774]: I1121 14:27:11.375641 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="sg-core" containerID="cri-o://5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630" gracePeriod=30 Nov 21 14:27:11 crc kubenswrapper[4774]: I1121 14:27:11.375685 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="proxy-httpd" containerID="cri-o://3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234" gracePeriod=30 Nov 21 14:27:11 crc kubenswrapper[4774]: I1121 14:27:11.375735 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="ceilometer-notification-agent" containerID="cri-o://4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490" gracePeriod=30 Nov 21 14:27:11 crc kubenswrapper[4774]: I1121 14:27:11.375604 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="ceilometer-central-agent" containerID="cri-o://3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f" gracePeriod=30 Nov 21 14:27:11 
crc kubenswrapper[4774]: I1121 14:27:11.477419 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.195:3000/\": read tcp 10.217.0.2:53072->10.217.0.195:3000: read: connection reset by peer" Nov 21 14:27:11 crc kubenswrapper[4774]: I1121 14:27:11.677743 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:11 crc kubenswrapper[4774]: I1121 14:27:11.827862 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:27:12 crc kubenswrapper[4774]: I1121 14:27:12.068870 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" event={"ID":"f89a7785-0a49-4c28-a587-ec113d2f3635","Type":"ContainerStarted","Data":"1d66c74501f2c450d7d07143b74c86af7aad69e0eb9bc0e1631a18ed7cbe8937"} Nov 21 14:27:12 crc kubenswrapper[4774]: I1121 14:27:12.068953 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:12 crc kubenswrapper[4774]: I1121 14:27:12.075209 4774 generic.go:334] "Generic (PLEG): container finished" podID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerID="3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234" exitCode=0 Nov 21 14:27:12 crc kubenswrapper[4774]: I1121 14:27:12.075299 4774 generic.go:334] "Generic (PLEG): container finished" podID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerID="5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630" exitCode=2 Nov 21 14:27:12 crc kubenswrapper[4774]: I1121 14:27:12.075312 4774 generic.go:334] "Generic (PLEG): container finished" podID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerID="3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f" exitCode=0 Nov 21 14:27:12 crc kubenswrapper[4774]: I1121 14:27:12.075532 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="72e3e847-c565-467a-9f43-64641e919888" containerName="nova-api-log" containerID="cri-o://c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a" gracePeriod=30 Nov 21 14:27:12 crc kubenswrapper[4774]: I1121 14:27:12.075852 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0d6fcef-c135-49fd-b80d-274d4ed2193a","Type":"ContainerDied","Data":"3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234"} Nov 21 14:27:12 crc kubenswrapper[4774]: I1121 14:27:12.075881 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0d6fcef-c135-49fd-b80d-274d4ed2193a","Type":"ContainerDied","Data":"5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630"} Nov 21 14:27:12 crc kubenswrapper[4774]: I1121 14:27:12.075892 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0d6fcef-c135-49fd-b80d-274d4ed2193a","Type":"ContainerDied","Data":"3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f"} Nov 21 14:27:12 crc kubenswrapper[4774]: I1121 14:27:12.075948 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="72e3e847-c565-467a-9f43-64641e919888" containerName="nova-api-api" containerID="cri-o://1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8" gracePeriod=30 Nov 21 14:27:12 crc kubenswrapper[4774]: I1121 
14:27:12.088561 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" podStartSLOduration=3.088534649 podStartE2EDuration="3.088534649s" podCreationTimestamp="2025-11-21 14:27:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:27:12.087268992 +0000 UTC m=+1422.739468281" watchObservedRunningTime="2025-11-21 14:27:12.088534649 +0000 UTC m=+1422.740733908" Nov 21 14:27:13 crc kubenswrapper[4774]: I1121 14:27:13.085781 4774 generic.go:334] "Generic (PLEG): container finished" podID="72e3e847-c565-467a-9f43-64641e919888" containerID="c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a" exitCode=143 Nov 21 14:27:13 crc kubenswrapper[4774]: I1121 14:27:13.085871 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"72e3e847-c565-467a-9f43-64641e919888","Type":"ContainerDied","Data":"c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a"} Nov 21 14:27:15 crc kubenswrapper[4774]: I1121 14:27:15.824932 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:27:15 crc kubenswrapper[4774]: I1121 14:27:15.972339 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5d72q\" (UniqueName: \"kubernetes.io/projected/72e3e847-c565-467a-9f43-64641e919888-kube-api-access-5d72q\") pod \"72e3e847-c565-467a-9f43-64641e919888\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " Nov 21 14:27:15 crc kubenswrapper[4774]: I1121 14:27:15.972572 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72e3e847-c565-467a-9f43-64641e919888-logs\") pod \"72e3e847-c565-467a-9f43-64641e919888\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " Nov 21 14:27:15 crc kubenswrapper[4774]: I1121 14:27:15.972761 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72e3e847-c565-467a-9f43-64641e919888-combined-ca-bundle\") pod \"72e3e847-c565-467a-9f43-64641e919888\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " Nov 21 14:27:15 crc kubenswrapper[4774]: I1121 14:27:15.972851 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72e3e847-c565-467a-9f43-64641e919888-config-data\") pod \"72e3e847-c565-467a-9f43-64641e919888\" (UID: \"72e3e847-c565-467a-9f43-64641e919888\") " Nov 21 14:27:15 crc kubenswrapper[4774]: I1121 14:27:15.973379 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/72e3e847-c565-467a-9f43-64641e919888-logs" (OuterVolumeSpecName: "logs") pod "72e3e847-c565-467a-9f43-64641e919888" (UID: "72e3e847-c565-467a-9f43-64641e919888"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:27:15 crc kubenswrapper[4774]: I1121 14:27:15.982921 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72e3e847-c565-467a-9f43-64641e919888-kube-api-access-5d72q" (OuterVolumeSpecName: "kube-api-access-5d72q") pod "72e3e847-c565-467a-9f43-64641e919888" (UID: "72e3e847-c565-467a-9f43-64641e919888"). InnerVolumeSpecName "kube-api-access-5d72q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.007699 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72e3e847-c565-467a-9f43-64641e919888-config-data" (OuterVolumeSpecName: "config-data") pod "72e3e847-c565-467a-9f43-64641e919888" (UID: "72e3e847-c565-467a-9f43-64641e919888"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.014710 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72e3e847-c565-467a-9f43-64641e919888-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72e3e847-c565-467a-9f43-64641e919888" (UID: "72e3e847-c565-467a-9f43-64641e919888"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.075698 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72e3e847-c565-467a-9f43-64641e919888-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.075733 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72e3e847-c565-467a-9f43-64641e919888-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.075743 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5d72q\" (UniqueName: \"kubernetes.io/projected/72e3e847-c565-467a-9f43-64641e919888-kube-api-access-5d72q\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.075756 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/72e3e847-c565-467a-9f43-64641e919888-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.124172 4774 generic.go:334] "Generic (PLEG): container finished" podID="72e3e847-c565-467a-9f43-64641e919888" containerID="1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8" exitCode=0 Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.124223 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"72e3e847-c565-467a-9f43-64641e919888","Type":"ContainerDied","Data":"1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8"} Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.124231 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.124262 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"72e3e847-c565-467a-9f43-64641e919888","Type":"ContainerDied","Data":"cb07a9243eac870fbb52d522a6e12141e7d3d1b5c1276daf4792cd193a6f192d"} Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.124282 4774 scope.go:117] "RemoveContainer" containerID="1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.158325 4774 scope.go:117] "RemoveContainer" containerID="c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.166648 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.184726 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.190239 4774 scope.go:117] "RemoveContainer" containerID="1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8" Nov 21 14:27:16 crc kubenswrapper[4774]: E1121 14:27:16.190907 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8\": container with ID starting with 1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8 not found: ID does not exist" containerID="1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.190944 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8"} err="failed to get container status \"1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8\": rpc error: code = NotFound desc = could not find container \"1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8\": container with ID starting with 1db670d57e82dd3bdf23340c2f89eb42ee44db3d25467b6197aec6585b8042e8 not found: ID does not exist" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.190970 4774 scope.go:117] "RemoveContainer" containerID="c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a" Nov 21 14:27:16 crc kubenswrapper[4774]: E1121 14:27:16.192066 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a\": container with ID starting with c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a not found: ID does not exist" containerID="c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.192136 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a"} err="failed to get container status \"c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a\": rpc error: code = NotFound desc = could not find container \"c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a\": container with ID starting with c6910b952dd9ba0cd91749ac5f11fb863e94b025847bbcc969e75a740825ca1a not found: ID does not exist" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.193750 4774 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 21 14:27:16 crc kubenswrapper[4774]: E1121 14:27:16.194413 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72e3e847-c565-467a-9f43-64641e919888" containerName="nova-api-log" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.194442 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="72e3e847-c565-467a-9f43-64641e919888" containerName="nova-api-log" Nov 21 14:27:16 crc kubenswrapper[4774]: E1121 14:27:16.194462 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72e3e847-c565-467a-9f43-64641e919888" containerName="nova-api-api" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.194470 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="72e3e847-c565-467a-9f43-64641e919888" containerName="nova-api-api" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.194713 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="72e3e847-c565-467a-9f43-64641e919888" containerName="nova-api-api" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.194736 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="72e3e847-c565-467a-9f43-64641e919888" containerName="nova-api-log" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.195907 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.201087 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.202369 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.202603 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.202791 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.386142 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-public-tls-certs\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.386385 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.386542 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snwz8\" (UniqueName: \"kubernetes.io/projected/137108f4-eec9-48ea-89ad-ec6adc0a01db-kube-api-access-snwz8\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.386616 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-config-data\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 
14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.386804 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/137108f4-eec9-48ea-89ad-ec6adc0a01db-logs\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.386899 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-internal-tls-certs\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.488840 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snwz8\" (UniqueName: \"kubernetes.io/projected/137108f4-eec9-48ea-89ad-ec6adc0a01db-kube-api-access-snwz8\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.489347 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-config-data\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.489399 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/137108f4-eec9-48ea-89ad-ec6adc0a01db-logs\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.489425 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-internal-tls-certs\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.489523 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-public-tls-certs\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.489655 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.490903 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/137108f4-eec9-48ea-89ad-ec6adc0a01db-logs\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.494737 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-public-tls-certs\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 
14:27:16.494848 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-internal-tls-certs\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.497364 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-config-data\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.510929 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.511280 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snwz8\" (UniqueName: \"kubernetes.io/projected/137108f4-eec9-48ea-89ad-ec6adc0a01db-kube-api-access-snwz8\") pod \"nova-api-0\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.519529 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.678247 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.709507 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.712463 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.901001 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-combined-ca-bundle\") pod \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.901567 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-config-data\") pod \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.901605 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-ceilometer-tls-certs\") pod \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.901670 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxt5v\" (UniqueName: \"kubernetes.io/projected/e0d6fcef-c135-49fd-b80d-274d4ed2193a-kube-api-access-jxt5v\") pod \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.901870 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-scripts\") pod \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.901986 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0d6fcef-c135-49fd-b80d-274d4ed2193a-log-httpd\") pod \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.902030 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-sg-core-conf-yaml\") pod \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.902072 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0d6fcef-c135-49fd-b80d-274d4ed2193a-run-httpd\") pod \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\" (UID: \"e0d6fcef-c135-49fd-b80d-274d4ed2193a\") " Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.902678 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0d6fcef-c135-49fd-b80d-274d4ed2193a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e0d6fcef-c135-49fd-b80d-274d4ed2193a" (UID: "e0d6fcef-c135-49fd-b80d-274d4ed2193a"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.903601 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0d6fcef-c135-49fd-b80d-274d4ed2193a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e0d6fcef-c135-49fd-b80d-274d4ed2193a" (UID: "e0d6fcef-c135-49fd-b80d-274d4ed2193a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.904528 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0d6fcef-c135-49fd-b80d-274d4ed2193a-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.904552 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e0d6fcef-c135-49fd-b80d-274d4ed2193a-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.907561 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-scripts" (OuterVolumeSpecName: "scripts") pod "e0d6fcef-c135-49fd-b80d-274d4ed2193a" (UID: "e0d6fcef-c135-49fd-b80d-274d4ed2193a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.911281 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0d6fcef-c135-49fd-b80d-274d4ed2193a-kube-api-access-jxt5v" (OuterVolumeSpecName: "kube-api-access-jxt5v") pod "e0d6fcef-c135-49fd-b80d-274d4ed2193a" (UID: "e0d6fcef-c135-49fd-b80d-274d4ed2193a"). InnerVolumeSpecName "kube-api-access-jxt5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.937038 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e0d6fcef-c135-49fd-b80d-274d4ed2193a" (UID: "e0d6fcef-c135-49fd-b80d-274d4ed2193a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:16 crc kubenswrapper[4774]: I1121 14:27:16.966296 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "e0d6fcef-c135-49fd-b80d-274d4ed2193a" (UID: "e0d6fcef-c135-49fd-b80d-274d4ed2193a"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.007052 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.007088 4774 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.007101 4774 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.007112 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxt5v\" (UniqueName: \"kubernetes.io/projected/e0d6fcef-c135-49fd-b80d-274d4ed2193a-kube-api-access-jxt5v\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.013001 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0d6fcef-c135-49fd-b80d-274d4ed2193a" (UID: "e0d6fcef-c135-49fd-b80d-274d4ed2193a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.022770 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-config-data" (OuterVolumeSpecName: "config-data") pod "e0d6fcef-c135-49fd-b80d-274d4ed2193a" (UID: "e0d6fcef-c135-49fd-b80d-274d4ed2193a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.052962 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.110145 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.110196 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0d6fcef-c135-49fd-b80d-274d4ed2193a-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.140868 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0d6fcef-c135-49fd-b80d-274d4ed2193a","Type":"ContainerDied","Data":"4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490"} Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.140890 4774 generic.go:334] "Generic (PLEG): container finished" podID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerID="4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490" exitCode=0 Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.140937 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e0d6fcef-c135-49fd-b80d-274d4ed2193a","Type":"ContainerDied","Data":"4c6737e75eb6b61aac9d25c260dde155122ba35b80e285f4c3c223a3a45617d0"} Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.140969 4774 scope.go:117] "RemoveContainer" containerID="3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.140988 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.143447 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"137108f4-eec9-48ea-89ad-ec6adc0a01db","Type":"ContainerStarted","Data":"f9761038ddc9e7c6231afef0c69d5639c988a2e1967e47200236ea0dfd253ef2"} Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.168248 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.172268 4774 scope.go:117] "RemoveContainer" containerID="5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.196263 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.199556 4774 scope.go:117] "RemoveContainer" containerID="4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.240067 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.255309 4774 scope.go:117] "RemoveContainer" containerID="3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.296857 4774 scope.go:117] "RemoveContainer" containerID="3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234" Nov 21 14:27:17 crc kubenswrapper[4774]: E1121 14:27:17.299473 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234\": container with ID starting with 3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234 not found: ID does not exist" containerID="3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.299509 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234"} err="failed to get container status \"3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234\": rpc error: code = NotFound desc = could not find container \"3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234\": container with ID starting with 3bbfd306817efed3ed5ae878a9a3a0fe36cc8d3c78426b19552faab9746c8234 not found: ID does not exist" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.299539 4774 scope.go:117] "RemoveContainer" containerID="5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630" Nov 21 14:27:17 crc kubenswrapper[4774]: E1121 14:27:17.300545 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630\": container with ID starting with 5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630 not found: ID does not exist" containerID="5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.300569 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630"} err="failed to get container status 
\"5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630\": rpc error: code = NotFound desc = could not find container \"5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630\": container with ID starting with 5247c56ea7ae1027cee5179ea2425ba7633a00ae8c96f654ab3e79a6ce8e8630 not found: ID does not exist" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.300585 4774 scope.go:117] "RemoveContainer" containerID="4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490" Nov 21 14:27:17 crc kubenswrapper[4774]: E1121 14:27:17.300936 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490\": container with ID starting with 4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490 not found: ID does not exist" containerID="4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.301001 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490"} err="failed to get container status \"4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490\": rpc error: code = NotFound desc = could not find container \"4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490\": container with ID starting with 4c7660e8c5b6a4452bbe5059907bd68b48e99d8984c537d23fecb0de66608490 not found: ID does not exist" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.301044 4774 scope.go:117] "RemoveContainer" containerID="3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f" Nov 21 14:27:17 crc kubenswrapper[4774]: E1121 14:27:17.306029 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f\": container with ID starting with 3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f not found: ID does not exist" containerID="3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.306063 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f"} err="failed to get container status \"3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f\": rpc error: code = NotFound desc = could not find container \"3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f\": container with ID starting with 3e2304311fe29cbbc2b636c2baa0934b0fee5fec334138393ea21062f4d68d7f not found: ID does not exist" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.308373 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:27:17 crc kubenswrapper[4774]: E1121 14:27:17.308907 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="ceilometer-notification-agent" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.308928 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="ceilometer-notification-agent" Nov 21 14:27:17 crc kubenswrapper[4774]: E1121 14:27:17.308940 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" 
containerName="sg-core" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.308948 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="sg-core" Nov 21 14:27:17 crc kubenswrapper[4774]: E1121 14:27:17.308969 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="ceilometer-central-agent" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.308975 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="ceilometer-central-agent" Nov 21 14:27:17 crc kubenswrapper[4774]: E1121 14:27:17.309002 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="proxy-httpd" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.309007 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="proxy-httpd" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.309188 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="proxy-httpd" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.309205 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="ceilometer-notification-agent" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.309224 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="sg-core" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.309235 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" containerName="ceilometer-central-agent" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.311389 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.318688 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.318731 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.319002 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.319421 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.410053 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-rkrvw"] Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.416693 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.421971 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.423018 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.424680 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc1f1975-32c8-494c-b6c7-69a72353879f-log-httpd\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.424760 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn6hg\" (UniqueName: \"kubernetes.io/projected/dc1f1975-32c8-494c-b6c7-69a72353879f-kube-api-access-fn6hg\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.424805 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.424857 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-scripts\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.424898 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc1f1975-32c8-494c-b6c7-69a72353879f-run-httpd\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.424921 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.424942 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-config-data\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.424959 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.430099 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-cell1-cell-mapping-rkrvw"] Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.527110 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7qhf\" (UniqueName: \"kubernetes.io/projected/7751c2da-9178-46c8-bd67-32bd9977eae4-kube-api-access-v7qhf\") pod \"nova-cell1-cell-mapping-rkrvw\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.527169 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn6hg\" (UniqueName: \"kubernetes.io/projected/dc1f1975-32c8-494c-b6c7-69a72353879f-kube-api-access-fn6hg\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.527849 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rkrvw\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.527881 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.527976 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-scripts\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.528607 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc1f1975-32c8-494c-b6c7-69a72353879f-run-httpd\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.528638 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.528658 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-config-data\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.528679 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.528753 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-config-data\") pod \"nova-cell1-cell-mapping-rkrvw\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.528797 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc1f1975-32c8-494c-b6c7-69a72353879f-log-httpd\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.528835 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-scripts\") pod \"nova-cell1-cell-mapping-rkrvw\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.529230 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc1f1975-32c8-494c-b6c7-69a72353879f-run-httpd\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.530610 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc1f1975-32c8-494c-b6c7-69a72353879f-log-httpd\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.534493 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.534796 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-scripts\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.535924 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.536249 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.540643 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-config-data\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.549962 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn6hg\" 
(UniqueName: \"kubernetes.io/projected/dc1f1975-32c8-494c-b6c7-69a72353879f-kube-api-access-fn6hg\") pod \"ceilometer-0\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.631279 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-config-data\") pod \"nova-cell1-cell-mapping-rkrvw\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.631379 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-scripts\") pod \"nova-cell1-cell-mapping-rkrvw\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.632561 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7qhf\" (UniqueName: \"kubernetes.io/projected/7751c2da-9178-46c8-bd67-32bd9977eae4-kube-api-access-v7qhf\") pod \"nova-cell1-cell-mapping-rkrvw\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.632634 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rkrvw\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.637294 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-scripts\") pod \"nova-cell1-cell-mapping-rkrvw\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.637587 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rkrvw\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.638543 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-config-data\") pod \"nova-cell1-cell-mapping-rkrvw\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.650612 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7qhf\" (UniqueName: \"kubernetes.io/projected/7751c2da-9178-46c8-bd67-32bd9977eae4-kube-api-access-v7qhf\") pod \"nova-cell1-cell-mapping-rkrvw\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.651514 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:27:17 crc kubenswrapper[4774]: I1121 14:27:17.698126 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:18 crc kubenswrapper[4774]: I1121 14:27:18.118047 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72e3e847-c565-467a-9f43-64641e919888" path="/var/lib/kubelet/pods/72e3e847-c565-467a-9f43-64641e919888/volumes" Nov 21 14:27:18 crc kubenswrapper[4774]: I1121 14:27:18.119431 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0d6fcef-c135-49fd-b80d-274d4ed2193a" path="/var/lib/kubelet/pods/e0d6fcef-c135-49fd-b80d-274d4ed2193a/volumes" Nov 21 14:27:18 crc kubenswrapper[4774]: I1121 14:27:18.163452 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"137108f4-eec9-48ea-89ad-ec6adc0a01db","Type":"ContainerStarted","Data":"dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61"} Nov 21 14:27:18 crc kubenswrapper[4774]: I1121 14:27:18.163496 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"137108f4-eec9-48ea-89ad-ec6adc0a01db","Type":"ContainerStarted","Data":"2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484"} Nov 21 14:27:18 crc kubenswrapper[4774]: W1121 14:27:18.203319 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc1f1975_32c8_494c_b6c7_69a72353879f.slice/crio-0c88d9b23222893490a25b9deae558b453daa1d5fa8eba5b37e1ba8fc2b53dd8 WatchSource:0}: Error finding container 0c88d9b23222893490a25b9deae558b453daa1d5fa8eba5b37e1ba8fc2b53dd8: Status 404 returned error can't find the container with id 0c88d9b23222893490a25b9deae558b453daa1d5fa8eba5b37e1ba8fc2b53dd8 Nov 21 14:27:18 crc kubenswrapper[4774]: I1121 14:27:18.203945 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.203927092 podStartE2EDuration="2.203927092s" podCreationTimestamp="2025-11-21 14:27:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:27:18.18244297 +0000 UTC m=+1428.834642229" watchObservedRunningTime="2025-11-21 14:27:18.203927092 +0000 UTC m=+1428.856126351" Nov 21 14:27:18 crc kubenswrapper[4774]: I1121 14:27:18.205806 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 14:27:18 crc kubenswrapper[4774]: I1121 14:27:18.224894 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:27:18 crc kubenswrapper[4774]: W1121 14:27:18.297219 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7751c2da_9178_46c8_bd67_32bd9977eae4.slice/crio-9d3298cd33916d332d0f566a61b126163b9701f0e7133615995de2d9f692e4b9 WatchSource:0}: Error finding container 9d3298cd33916d332d0f566a61b126163b9701f0e7133615995de2d9f692e4b9: Status 404 returned error can't find the container with id 9d3298cd33916d332d0f566a61b126163b9701f0e7133615995de2d9f692e4b9 Nov 21 14:27:18 crc kubenswrapper[4774]: I1121 14:27:18.299311 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkrvw"] Nov 21 14:27:19 crc kubenswrapper[4774]: I1121 14:27:19.174648 4774 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/nova-cell1-cell-mapping-rkrvw" event={"ID":"7751c2da-9178-46c8-bd67-32bd9977eae4","Type":"ContainerStarted","Data":"a7c9ecd2a24973515fc128d880451a4e28dc9101f6d676278ef3ba81ea04ca19"} Nov 21 14:27:19 crc kubenswrapper[4774]: I1121 14:27:19.175645 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkrvw" event={"ID":"7751c2da-9178-46c8-bd67-32bd9977eae4","Type":"ContainerStarted","Data":"9d3298cd33916d332d0f566a61b126163b9701f0e7133615995de2d9f692e4b9"} Nov 21 14:27:19 crc kubenswrapper[4774]: I1121 14:27:19.177794 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc1f1975-32c8-494c-b6c7-69a72353879f","Type":"ContainerStarted","Data":"a4a27b3c5077e95426b1db0a18c43f2ded3d18629d74ecfdf80ae409e2215348"} Nov 21 14:27:19 crc kubenswrapper[4774]: I1121 14:27:19.177927 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc1f1975-32c8-494c-b6c7-69a72353879f","Type":"ContainerStarted","Data":"0c88d9b23222893490a25b9deae558b453daa1d5fa8eba5b37e1ba8fc2b53dd8"} Nov 21 14:27:19 crc kubenswrapper[4774]: I1121 14:27:19.544851 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:27:19 crc kubenswrapper[4774]: I1121 14:27:19.578387 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-rkrvw" podStartSLOduration=2.578362525 podStartE2EDuration="2.578362525s" podCreationTimestamp="2025-11-21 14:27:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:27:19.197354655 +0000 UTC m=+1429.849553924" watchObservedRunningTime="2025-11-21 14:27:19.578362525 +0000 UTC m=+1430.230561794" Nov 21 14:27:19 crc kubenswrapper[4774]: I1121 14:27:19.631555 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b9c9d97f9-8wtgk"] Nov 21 14:27:19 crc kubenswrapper[4774]: I1121 14:27:19.631860 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" podUID="f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" containerName="dnsmasq-dns" containerID="cri-o://78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9" gracePeriod=10 Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.205143 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.210785 4774 generic.go:334] "Generic (PLEG): container finished" podID="f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" containerID="78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9" exitCode=0 Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.210858 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" event={"ID":"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad","Type":"ContainerDied","Data":"78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9"} Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.211061 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" event={"ID":"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad","Type":"ContainerDied","Data":"f40c808ec77877b24e14083387396fc219cbe0388be932a053234a78f9a470a1"} Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.211105 4774 scope.go:117] "RemoveContainer" containerID="78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.215385 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc1f1975-32c8-494c-b6c7-69a72353879f","Type":"ContainerStarted","Data":"7d2c6e460846a332f45e2cd1fd8b4211e1fef71fdaba2c330e61b8c2240fa3a3"} Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.242612 4774 scope.go:117] "RemoveContainer" containerID="fb6b13b2596604351c6af17a663ada81ceb746ed17aadfa94e325d39581c37b2" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.299979 4774 scope.go:117] "RemoveContainer" containerID="78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9" Nov 21 14:27:20 crc kubenswrapper[4774]: E1121 14:27:20.300566 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9\": container with ID starting with 78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9 not found: ID does not exist" containerID="78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.300612 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9"} err="failed to get container status \"78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9\": rpc error: code = NotFound desc = could not find container \"78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9\": container with ID starting with 78588653cd6a8347dea93b08ac39044b96d2c16995d93ccae8598dc3fc76ffa9 not found: ID does not exist" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.300644 4774 scope.go:117] "RemoveContainer" containerID="fb6b13b2596604351c6af17a663ada81ceb746ed17aadfa94e325d39581c37b2" Nov 21 14:27:20 crc kubenswrapper[4774]: E1121 14:27:20.301122 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb6b13b2596604351c6af17a663ada81ceb746ed17aadfa94e325d39581c37b2\": container with ID starting with fb6b13b2596604351c6af17a663ada81ceb746ed17aadfa94e325d39581c37b2 not found: ID does not exist" containerID="fb6b13b2596604351c6af17a663ada81ceb746ed17aadfa94e325d39581c37b2" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.301155 
4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb6b13b2596604351c6af17a663ada81ceb746ed17aadfa94e325d39581c37b2"} err="failed to get container status \"fb6b13b2596604351c6af17a663ada81ceb746ed17aadfa94e325d39581c37b2\": rpc error: code = NotFound desc = could not find container \"fb6b13b2596604351c6af17a663ada81ceb746ed17aadfa94e325d39581c37b2\": container with ID starting with fb6b13b2596604351c6af17a663ada81ceb746ed17aadfa94e325d39581c37b2 not found: ID does not exist" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.312143 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-ovsdbserver-sb\") pod \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.312245 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-dns-swift-storage-0\") pod \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.312288 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-config\") pod \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.312625 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8rwh\" (UniqueName: \"kubernetes.io/projected/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-kube-api-access-k8rwh\") pod \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.312667 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-ovsdbserver-nb\") pod \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.312727 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-dns-svc\") pod \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\" (UID: \"f2bdd63f-eb21-4fdc-9aca-9ff3148dccad\") " Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.317513 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-kube-api-access-k8rwh" (OuterVolumeSpecName: "kube-api-access-k8rwh") pod "f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" (UID: "f2bdd63f-eb21-4fdc-9aca-9ff3148dccad"). InnerVolumeSpecName "kube-api-access-k8rwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.371152 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-config" (OuterVolumeSpecName: "config") pod "f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" (UID: "f2bdd63f-eb21-4fdc-9aca-9ff3148dccad"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.375427 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" (UID: "f2bdd63f-eb21-4fdc-9aca-9ff3148dccad"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.387423 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" (UID: "f2bdd63f-eb21-4fdc-9aca-9ff3148dccad"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.387639 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" (UID: "f2bdd63f-eb21-4fdc-9aca-9ff3148dccad"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.388488 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" (UID: "f2bdd63f-eb21-4fdc-9aca-9ff3148dccad"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.417842 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8rwh\" (UniqueName: \"kubernetes.io/projected/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-kube-api-access-k8rwh\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.417895 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.417907 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.417916 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.417925 4774 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:20 crc kubenswrapper[4774]: I1121 14:27:20.417933 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:21 crc kubenswrapper[4774]: I1121 14:27:21.231059 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceilometer-0" event={"ID":"dc1f1975-32c8-494c-b6c7-69a72353879f","Type":"ContainerStarted","Data":"ea9ca71244bc7f1b46eba74e6204643a6aa38bccdbe0a89dc25f4da34716f6b9"} Nov 21 14:27:21 crc kubenswrapper[4774]: I1121 14:27:21.235011 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" Nov 21 14:27:21 crc kubenswrapper[4774]: I1121 14:27:21.272119 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b9c9d97f9-8wtgk"] Nov 21 14:27:21 crc kubenswrapper[4774]: I1121 14:27:21.285524 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b9c9d97f9-8wtgk"] Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.124176 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" path="/var/lib/kubelet/pods/f2bdd63f-eb21-4fdc-9aca-9ff3148dccad/volumes" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.251810 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc1f1975-32c8-494c-b6c7-69a72353879f","Type":"ContainerStarted","Data":"b6dead7e6b8a9edca06d5258569ba83c012500d4af7950b97020473ff90fca2b"} Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.252632 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.288876 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.037202672 podStartE2EDuration="5.288845661s" podCreationTimestamp="2025-11-21 14:27:17 +0000 UTC" firstStartedPulling="2025-11-21 14:27:18.20559515 +0000 UTC m=+1428.857794409" lastFinishedPulling="2025-11-21 14:27:21.457238139 +0000 UTC m=+1432.109437398" observedRunningTime="2025-11-21 14:27:22.286755911 +0000 UTC m=+1432.938955180" watchObservedRunningTime="2025-11-21 14:27:22.288845661 +0000 UTC m=+1432.941044920" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.632862 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-g6cng"] Nov 21 14:27:22 crc kubenswrapper[4774]: E1121 14:27:22.633538 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" containerName="init" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.633561 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" containerName="init" Nov 21 14:27:22 crc kubenswrapper[4774]: E1121 14:27:22.633590 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" containerName="dnsmasq-dns" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.633599 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" containerName="dnsmasq-dns" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.633867 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" containerName="dnsmasq-dns" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.640150 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.648250 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g6cng"] Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.771638 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxlg8\" (UniqueName: \"kubernetes.io/projected/eaa5ff38-d544-4641-90cb-591db96ac40b-kube-api-access-zxlg8\") pod \"redhat-operators-g6cng\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.771716 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-utilities\") pod \"redhat-operators-g6cng\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.772113 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-catalog-content\") pod \"redhat-operators-g6cng\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.874011 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-catalog-content\") pod \"redhat-operators-g6cng\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.874194 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxlg8\" (UniqueName: \"kubernetes.io/projected/eaa5ff38-d544-4641-90cb-591db96ac40b-kube-api-access-zxlg8\") pod \"redhat-operators-g6cng\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.874255 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-utilities\") pod \"redhat-operators-g6cng\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.874744 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-catalog-content\") pod \"redhat-operators-g6cng\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.874813 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-utilities\") pod \"redhat-operators-g6cng\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.896049 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-zxlg8\" (UniqueName: \"kubernetes.io/projected/eaa5ff38-d544-4641-90cb-591db96ac40b-kube-api-access-zxlg8\") pod \"redhat-operators-g6cng\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:22 crc kubenswrapper[4774]: I1121 14:27:22.974268 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:23 crc kubenswrapper[4774]: I1121 14:27:23.457529 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-g6cng"] Nov 21 14:27:23 crc kubenswrapper[4774]: E1121 14:27:23.793085 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeaa5ff38_d544_4641_90cb_591db96ac40b.slice/crio-conmon-5628bc66ab2a3a571efaf4dc2552aac57ea30dd6fb0808d68f2a92289f85ad58.scope\": RecentStats: unable to find data in memory cache]" Nov 21 14:27:24 crc kubenswrapper[4774]: I1121 14:27:24.277477 4774 generic.go:334] "Generic (PLEG): container finished" podID="eaa5ff38-d544-4641-90cb-591db96ac40b" containerID="5628bc66ab2a3a571efaf4dc2552aac57ea30dd6fb0808d68f2a92289f85ad58" exitCode=0 Nov 21 14:27:24 crc kubenswrapper[4774]: I1121 14:27:24.277554 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g6cng" event={"ID":"eaa5ff38-d544-4641-90cb-591db96ac40b","Type":"ContainerDied","Data":"5628bc66ab2a3a571efaf4dc2552aac57ea30dd6fb0808d68f2a92289f85ad58"} Nov 21 14:27:24 crc kubenswrapper[4774]: I1121 14:27:24.277591 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g6cng" event={"ID":"eaa5ff38-d544-4641-90cb-591db96ac40b","Type":"ContainerStarted","Data":"919b14cd6d9111c8afa6d4130123b42a846d732cc51316b225425d53f5aaa162"} Nov 21 14:27:24 crc kubenswrapper[4774]: I1121 14:27:24.281276 4774 generic.go:334] "Generic (PLEG): container finished" podID="7751c2da-9178-46c8-bd67-32bd9977eae4" containerID="a7c9ecd2a24973515fc128d880451a4e28dc9101f6d676278ef3ba81ea04ca19" exitCode=0 Nov 21 14:27:24 crc kubenswrapper[4774]: I1121 14:27:24.281374 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkrvw" event={"ID":"7751c2da-9178-46c8-bd67-32bd9977eae4","Type":"ContainerDied","Data":"a7c9ecd2a24973515fc128d880451a4e28dc9101f6d676278ef3ba81ea04ca19"} Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.072384 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6b9c9d97f9-8wtgk" podUID="f2bdd63f-eb21-4fdc-9aca-9ff3148dccad" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.187:5353: i/o timeout" Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.298997 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g6cng" event={"ID":"eaa5ff38-d544-4641-90cb-591db96ac40b","Type":"ContainerStarted","Data":"5bb32cb57b043f238752d5ea56b400f6cac761f27c79f9a2958aa35e84ff78bd"} Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.705194 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.847064 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-combined-ca-bundle\") pod \"7751c2da-9178-46c8-bd67-32bd9977eae4\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.847294 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-scripts\") pod \"7751c2da-9178-46c8-bd67-32bd9977eae4\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.847320 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-config-data\") pod \"7751c2da-9178-46c8-bd67-32bd9977eae4\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.847696 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7qhf\" (UniqueName: \"kubernetes.io/projected/7751c2da-9178-46c8-bd67-32bd9977eae4-kube-api-access-v7qhf\") pod \"7751c2da-9178-46c8-bd67-32bd9977eae4\" (UID: \"7751c2da-9178-46c8-bd67-32bd9977eae4\") " Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.857180 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-scripts" (OuterVolumeSpecName: "scripts") pod "7751c2da-9178-46c8-bd67-32bd9977eae4" (UID: "7751c2da-9178-46c8-bd67-32bd9977eae4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.870380 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7751c2da-9178-46c8-bd67-32bd9977eae4-kube-api-access-v7qhf" (OuterVolumeSpecName: "kube-api-access-v7qhf") pod "7751c2da-9178-46c8-bd67-32bd9977eae4" (UID: "7751c2da-9178-46c8-bd67-32bd9977eae4"). InnerVolumeSpecName "kube-api-access-v7qhf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.884382 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7751c2da-9178-46c8-bd67-32bd9977eae4" (UID: "7751c2da-9178-46c8-bd67-32bd9977eae4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.909366 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-config-data" (OuterVolumeSpecName: "config-data") pod "7751c2da-9178-46c8-bd67-32bd9977eae4" (UID: "7751c2da-9178-46c8-bd67-32bd9977eae4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.951386 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7qhf\" (UniqueName: \"kubernetes.io/projected/7751c2da-9178-46c8-bd67-32bd9977eae4-kube-api-access-v7qhf\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.951419 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.951429 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:25 crc kubenswrapper[4774]: I1121 14:27:25.951441 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7751c2da-9178-46c8-bd67-32bd9977eae4-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:26 crc kubenswrapper[4774]: I1121 14:27:26.314970 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkrvw" event={"ID":"7751c2da-9178-46c8-bd67-32bd9977eae4","Type":"ContainerDied","Data":"9d3298cd33916d332d0f566a61b126163b9701f0e7133615995de2d9f692e4b9"} Nov 21 14:27:26 crc kubenswrapper[4774]: I1121 14:27:26.315061 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d3298cd33916d332d0f566a61b126163b9701f0e7133615995de2d9f692e4b9" Nov 21 14:27:26 crc kubenswrapper[4774]: I1121 14:27:26.315132 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkrvw" Nov 21 14:27:26 crc kubenswrapper[4774]: I1121 14:27:26.520005 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 14:27:26 crc kubenswrapper[4774]: I1121 14:27:26.520060 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 14:27:26 crc kubenswrapper[4774]: I1121 14:27:26.540451 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:27:26 crc kubenswrapper[4774]: I1121 14:27:26.554795 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:27:26 crc kubenswrapper[4774]: I1121 14:27:26.555089 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780" containerName="nova-scheduler-scheduler" containerID="cri-o://8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6" gracePeriod=30 Nov 21 14:27:26 crc kubenswrapper[4774]: I1121 14:27:26.575286 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:27:26 crc kubenswrapper[4774]: I1121 14:27:26.575625 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerName="nova-metadata-log" containerID="cri-o://1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d" gracePeriod=30 Nov 21 14:27:26 crc kubenswrapper[4774]: I1121 14:27:26.575763 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" 
containerName="nova-metadata-metadata" containerID="cri-o://cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036" gracePeriod=30 Nov 21 14:27:27 crc kubenswrapper[4774]: I1121 14:27:27.326669 4774 generic.go:334] "Generic (PLEG): container finished" podID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerID="1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d" exitCode=143 Nov 21 14:27:27 crc kubenswrapper[4774]: I1121 14:27:27.326761 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75556fe3-ee8c-4829-ae0e-c79cc249615d","Type":"ContainerDied","Data":"1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d"} Nov 21 14:27:27 crc kubenswrapper[4774]: I1121 14:27:27.329328 4774 generic.go:334] "Generic (PLEG): container finished" podID="eaa5ff38-d544-4641-90cb-591db96ac40b" containerID="5bb32cb57b043f238752d5ea56b400f6cac761f27c79f9a2958aa35e84ff78bd" exitCode=0 Nov 21 14:27:27 crc kubenswrapper[4774]: I1121 14:27:27.329404 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g6cng" event={"ID":"eaa5ff38-d544-4641-90cb-591db96ac40b","Type":"ContainerDied","Data":"5bb32cb57b043f238752d5ea56b400f6cac761f27c79f9a2958aa35e84ff78bd"} Nov 21 14:27:27 crc kubenswrapper[4774]: I1121 14:27:27.329611 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="137108f4-eec9-48ea-89ad-ec6adc0a01db" containerName="nova-api-log" containerID="cri-o://2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484" gracePeriod=30 Nov 21 14:27:27 crc kubenswrapper[4774]: I1121 14:27:27.329732 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="137108f4-eec9-48ea-89ad-ec6adc0a01db" containerName="nova-api-api" containerID="cri-o://dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61" gracePeriod=30 Nov 21 14:27:27 crc kubenswrapper[4774]: I1121 14:27:27.336203 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="137108f4-eec9-48ea-89ad-ec6adc0a01db" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": EOF" Nov 21 14:27:27 crc kubenswrapper[4774]: I1121 14:27:27.336261 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="137108f4-eec9-48ea-89ad-ec6adc0a01db" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": EOF" Nov 21 14:27:28 crc kubenswrapper[4774]: E1121 14:27:28.234071 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6 is running failed: container process not found" containerID="8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 14:27:28 crc kubenswrapper[4774]: E1121 14:27:28.235123 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6 is running failed: container process not found" containerID="8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 14:27:28 crc kubenswrapper[4774]: E1121 14:27:28.235591 4774 log.go:32] "ExecSync cmd 
from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6 is running failed: container process not found" containerID="8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 14:27:28 crc kubenswrapper[4774]: E1121 14:27:28.235631 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780" containerName="nova-scheduler-scheduler" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.243900 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.341421 4774 generic.go:334] "Generic (PLEG): container finished" podID="137108f4-eec9-48ea-89ad-ec6adc0a01db" containerID="2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484" exitCode=143 Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.341532 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"137108f4-eec9-48ea-89ad-ec6adc0a01db","Type":"ContainerDied","Data":"2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484"} Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.343811 4774 generic.go:334] "Generic (PLEG): container finished" podID="7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780" containerID="8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6" exitCode=0 Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.343902 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780","Type":"ContainerDied","Data":"8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6"} Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.343915 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.343924 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780","Type":"ContainerDied","Data":"7de3ed65ef77133d71e392e679129a092ff168ebd2a17b78af49a6e80f021211"} Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.343946 4774 scope.go:117] "RemoveContainer" containerID="8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.347436 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g6cng" event={"ID":"eaa5ff38-d544-4641-90cb-591db96ac40b","Type":"ContainerStarted","Data":"1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159"} Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.372280 4774 scope.go:117] "RemoveContainer" containerID="8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6" Nov 21 14:27:28 crc kubenswrapper[4774]: E1121 14:27:28.373005 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6\": container with ID starting with 8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6 not found: ID does not exist" containerID="8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.373058 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6"} err="failed to get container status \"8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6\": rpc error: code = NotFound desc = could not find container \"8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6\": container with ID starting with 8aa5ea4e3fe1e244f9423dc777caf1f85aaee1fbdb656b8739d68875b283d1f6 not found: ID does not exist" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.385349 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-g6cng" podStartSLOduration=2.901007504 podStartE2EDuration="6.385317822s" podCreationTimestamp="2025-11-21 14:27:22 +0000 UTC" firstStartedPulling="2025-11-21 14:27:24.280300211 +0000 UTC m=+1434.932499470" lastFinishedPulling="2025-11-21 14:27:27.764610529 +0000 UTC m=+1438.416809788" observedRunningTime="2025-11-21 14:27:28.369150904 +0000 UTC m=+1439.021350173" watchObservedRunningTime="2025-11-21 14:27:28.385317822 +0000 UTC m=+1439.037517081" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.416367 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-combined-ca-bundle\") pod \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\" (UID: \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\") " Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.416671 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snpjj\" (UniqueName: \"kubernetes.io/projected/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-kube-api-access-snpjj\") pod \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\" (UID: \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\") " Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.416775 4774 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-config-data\") pod \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\" (UID: \"7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780\") " Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.422801 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-kube-api-access-snpjj" (OuterVolumeSpecName: "kube-api-access-snpjj") pod "7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780" (UID: "7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780"). InnerVolumeSpecName "kube-api-access-snpjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.445879 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780" (UID: "7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.458468 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-config-data" (OuterVolumeSpecName: "config-data") pod "7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780" (UID: "7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.520643 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snpjj\" (UniqueName: \"kubernetes.io/projected/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-kube-api-access-snpjj\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.520690 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.520701 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.683018 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.733128 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.768538 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:27:28 crc kubenswrapper[4774]: E1121 14:27:28.769232 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780" containerName="nova-scheduler-scheduler" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.769254 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780" containerName="nova-scheduler-scheduler" Nov 21 14:27:28 crc kubenswrapper[4774]: E1121 14:27:28.769275 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7751c2da-9178-46c8-bd67-32bd9977eae4" containerName="nova-manage" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.769283 4774 
state_mem.go:107] "Deleted CPUSet assignment" podUID="7751c2da-9178-46c8-bd67-32bd9977eae4" containerName="nova-manage" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.769520 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7751c2da-9178-46c8-bd67-32bd9977eae4" containerName="nova-manage" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.769550 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780" containerName="nova-scheduler-scheduler" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.772402 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.775508 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.797729 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.940617 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-config-data\") pod \"nova-scheduler-0\" (UID: \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\") " pod="openstack/nova-scheduler-0" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.940811 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcxpx\" (UniqueName: \"kubernetes.io/projected/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-kube-api-access-mcxpx\") pod \"nova-scheduler-0\" (UID: \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\") " pod="openstack/nova-scheduler-0" Nov 21 14:27:28 crc kubenswrapper[4774]: I1121 14:27:28.940900 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\") " pod="openstack/nova-scheduler-0" Nov 21 14:27:29 crc kubenswrapper[4774]: I1121 14:27:29.042717 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-config-data\") pod \"nova-scheduler-0\" (UID: \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\") " pod="openstack/nova-scheduler-0" Nov 21 14:27:29 crc kubenswrapper[4774]: I1121 14:27:29.042809 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcxpx\" (UniqueName: \"kubernetes.io/projected/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-kube-api-access-mcxpx\") pod \"nova-scheduler-0\" (UID: \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\") " pod="openstack/nova-scheduler-0" Nov 21 14:27:29 crc kubenswrapper[4774]: I1121 14:27:29.042853 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\") " pod="openstack/nova-scheduler-0" Nov 21 14:27:29 crc kubenswrapper[4774]: I1121 14:27:29.047672 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-combined-ca-bundle\") pod 
\"nova-scheduler-0\" (UID: \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\") " pod="openstack/nova-scheduler-0" Nov 21 14:27:29 crc kubenswrapper[4774]: I1121 14:27:29.048113 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-config-data\") pod \"nova-scheduler-0\" (UID: \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\") " pod="openstack/nova-scheduler-0" Nov 21 14:27:29 crc kubenswrapper[4774]: I1121 14:27:29.080486 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcxpx\" (UniqueName: \"kubernetes.io/projected/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-kube-api-access-mcxpx\") pod \"nova-scheduler-0\" (UID: \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\") " pod="openstack/nova-scheduler-0" Nov 21 14:27:29 crc kubenswrapper[4774]: I1121 14:27:29.106137 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 14:27:29 crc kubenswrapper[4774]: W1121 14:27:29.606868 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad456e3b_04a1_48d6_8fbc_39e3faa00aa0.slice/crio-ce7bc79cbe4d96ae8efafc1514532f5d9f01a6345051d4f611053214fc2380c1 WatchSource:0}: Error finding container ce7bc79cbe4d96ae8efafc1514532f5d9f01a6345051d4f611053214fc2380c1: Status 404 returned error can't find the container with id ce7bc79cbe4d96ae8efafc1514532f5d9f01a6345051d4f611053214fc2380c1 Nov 21 14:27:29 crc kubenswrapper[4774]: I1121 14:27:29.613428 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:27:29 crc kubenswrapper[4774]: I1121 14:27:29.722528 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": read tcp 10.217.0.2:35596->10.217.0.190:8775: read: connection reset by peer" Nov 21 14:27:29 crc kubenswrapper[4774]: I1121 14:27:29.722638 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": read tcp 10.217.0.2:35594->10.217.0.190:8775: read: connection reset by peer" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.115984 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780" path="/var/lib/kubelet/pods/7aee7fd9-8e3d-44d7-ad6d-095ae4ff8780/volumes" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.174877 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.272521 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-config-data\") pod \"75556fe3-ee8c-4829-ae0e-c79cc249615d\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.272583 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-nova-metadata-tls-certs\") pod \"75556fe3-ee8c-4829-ae0e-c79cc249615d\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.272773 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75556fe3-ee8c-4829-ae0e-c79cc249615d-logs\") pod \"75556fe3-ee8c-4829-ae0e-c79cc249615d\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.272812 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2pkh\" (UniqueName: \"kubernetes.io/projected/75556fe3-ee8c-4829-ae0e-c79cc249615d-kube-api-access-m2pkh\") pod \"75556fe3-ee8c-4829-ae0e-c79cc249615d\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.272848 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-combined-ca-bundle\") pod \"75556fe3-ee8c-4829-ae0e-c79cc249615d\" (UID: \"75556fe3-ee8c-4829-ae0e-c79cc249615d\") " Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.273382 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75556fe3-ee8c-4829-ae0e-c79cc249615d-logs" (OuterVolumeSpecName: "logs") pod "75556fe3-ee8c-4829-ae0e-c79cc249615d" (UID: "75556fe3-ee8c-4829-ae0e-c79cc249615d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.293574 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75556fe3-ee8c-4829-ae0e-c79cc249615d-kube-api-access-m2pkh" (OuterVolumeSpecName: "kube-api-access-m2pkh") pod "75556fe3-ee8c-4829-ae0e-c79cc249615d" (UID: "75556fe3-ee8c-4829-ae0e-c79cc249615d"). InnerVolumeSpecName "kube-api-access-m2pkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.316934 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-config-data" (OuterVolumeSpecName: "config-data") pod "75556fe3-ee8c-4829-ae0e-c79cc249615d" (UID: "75556fe3-ee8c-4829-ae0e-c79cc249615d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.329122 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "75556fe3-ee8c-4829-ae0e-c79cc249615d" (UID: "75556fe3-ee8c-4829-ae0e-c79cc249615d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.375312 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.375374 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75556fe3-ee8c-4829-ae0e-c79cc249615d-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.375386 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2pkh\" (UniqueName: \"kubernetes.io/projected/75556fe3-ee8c-4829-ae0e-c79cc249615d-kube-api-access-m2pkh\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.375398 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.375606 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "75556fe3-ee8c-4829-ae0e-c79cc249615d" (UID: "75556fe3-ee8c-4829-ae0e-c79cc249615d"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.391045 4774 generic.go:334] "Generic (PLEG): container finished" podID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerID="cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036" exitCode=0 Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.391137 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75556fe3-ee8c-4829-ae0e-c79cc249615d","Type":"ContainerDied","Data":"cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036"} Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.391174 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"75556fe3-ee8c-4829-ae0e-c79cc249615d","Type":"ContainerDied","Data":"dea713ef7f1910c2320df533b639bd7508ed165b171b27b4331fc80f33637914"} Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.391181 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.391192 4774 scope.go:117] "RemoveContainer" containerID="cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.393698 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0","Type":"ContainerStarted","Data":"3ab69fee82d7e8cb78023f292bf75b19ed476e76aa600827e67cc6b39f135018"} Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.393722 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0","Type":"ContainerStarted","Data":"ce7bc79cbe4d96ae8efafc1514532f5d9f01a6345051d4f611053214fc2380c1"} Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.421104 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.4210808520000002 podStartE2EDuration="2.421080852s" podCreationTimestamp="2025-11-21 14:27:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:27:30.411258298 +0000 UTC m=+1441.063457557" watchObservedRunningTime="2025-11-21 14:27:30.421080852 +0000 UTC m=+1441.073280111" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.438184 4774 scope.go:117] "RemoveContainer" containerID="1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.438957 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.462697 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.477017 4774 scope.go:117] "RemoveContainer" containerID="cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036" Nov 21 14:27:30 crc kubenswrapper[4774]: E1121 14:27:30.477409 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036\": container with ID starting with cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036 not found: ID does not exist" containerID="cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.477447 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036"} err="failed to get container status \"cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036\": rpc error: code = NotFound desc = could not find container \"cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036\": container with ID starting with cb4e63c2b697c0d07fa24640d2ac82b6e4f9e324d9f26f3afe2c82d5a5e5d036 not found: ID does not exist" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.477472 4774 scope.go:117] "RemoveContainer" containerID="1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d" Nov 21 14:27:30 crc kubenswrapper[4774]: E1121 14:27:30.477676 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d\": container with ID starting with 1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d not found: ID does not exist" containerID="1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.477697 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d"} err="failed to get container status \"1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d\": rpc error: code = NotFound desc = could not find container \"1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d\": container with ID starting with 1e4427faa5fef2bce22366c46aec2141c5fc238b9db08fe84d86ce00a1fdc22d not found: ID does not exist" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.479164 4774 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/75556fe3-ee8c-4829-ae0e-c79cc249615d-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.480483 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:27:30 crc kubenswrapper[4774]: E1121 14:27:30.481367 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerName="nova-metadata-log" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.481550 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerName="nova-metadata-log" Nov 21 14:27:30 crc kubenswrapper[4774]: E1121 14:27:30.481665 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerName="nova-metadata-metadata" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.481787 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerName="nova-metadata-metadata" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.482141 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerName="nova-metadata-log" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.482250 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" containerName="nova-metadata-metadata" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.487704 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.491708 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.512671 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.513761 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.583753 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-config-data\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.583920 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.584052 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-logs\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.584100 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.584246 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqmtn\" (UniqueName: \"kubernetes.io/projected/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-kube-api-access-kqmtn\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.686084 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-config-data\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.686149 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.686202 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-logs\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.686226 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.686271 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqmtn\" (UniqueName: \"kubernetes.io/projected/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-kube-api-access-kqmtn\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.687886 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-logs\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.691136 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-config-data\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.691577 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.691887 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.705562 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqmtn\" (UniqueName: \"kubernetes.io/projected/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-kube-api-access-kqmtn\") pod \"nova-metadata-0\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " pod="openstack/nova-metadata-0" Nov 21 14:27:30 crc kubenswrapper[4774]: I1121 14:27:30.848076 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:27:31 crc kubenswrapper[4774]: I1121 14:27:31.380470 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:27:31 crc kubenswrapper[4774]: I1121 14:27:31.415077 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27","Type":"ContainerStarted","Data":"9336da1bc8d59fe9dd4c3f70da38a1f80e302a29df83f9a6ad5fe0399f289007"} Nov 21 14:27:32 crc kubenswrapper[4774]: I1121 14:27:32.105907 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75556fe3-ee8c-4829-ae0e-c79cc249615d" path="/var/lib/kubelet/pods/75556fe3-ee8c-4829-ae0e-c79cc249615d/volumes" Nov 21 14:27:32 crc kubenswrapper[4774]: I1121 14:27:32.432773 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27","Type":"ContainerStarted","Data":"f794e1beb2a4d2e0aa4f9c55c4bf3c19e6f4475d6330263d426714add8939453"} Nov 21 14:27:32 crc kubenswrapper[4774]: I1121 14:27:32.433210 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27","Type":"ContainerStarted","Data":"1c051875890cc87d20e2ccc60014cd64e0c54c66081134927ca3c7218c65fef2"} Nov 21 14:27:32 crc kubenswrapper[4774]: I1121 14:27:32.460261 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.460236531 podStartE2EDuration="2.460236531s" podCreationTimestamp="2025-11-21 14:27:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:27:32.455633198 +0000 UTC m=+1443.107832467" watchObservedRunningTime="2025-11-21 14:27:32.460236531 +0000 UTC m=+1443.112435800" Nov 21 14:27:32 crc kubenswrapper[4774]: I1121 14:27:32.975303 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:32 crc kubenswrapper[4774]: I1121 14:27:32.975363 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.032812 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-g6cng" podUID="eaa5ff38-d544-4641-90cb-591db96ac40b" containerName="registry-server" probeResult="failure" output=< Nov 21 14:27:34 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 14:27:34 crc kubenswrapper[4774]: > Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.106461 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.277597 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.362713 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-config-data\") pod \"137108f4-eec9-48ea-89ad-ec6adc0a01db\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.362875 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-combined-ca-bundle\") pod \"137108f4-eec9-48ea-89ad-ec6adc0a01db\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.362970 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/137108f4-eec9-48ea-89ad-ec6adc0a01db-logs\") pod \"137108f4-eec9-48ea-89ad-ec6adc0a01db\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.363052 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-internal-tls-certs\") pod \"137108f4-eec9-48ea-89ad-ec6adc0a01db\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.363269 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snwz8\" (UniqueName: \"kubernetes.io/projected/137108f4-eec9-48ea-89ad-ec6adc0a01db-kube-api-access-snwz8\") pod \"137108f4-eec9-48ea-89ad-ec6adc0a01db\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.363310 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-public-tls-certs\") pod \"137108f4-eec9-48ea-89ad-ec6adc0a01db\" (UID: \"137108f4-eec9-48ea-89ad-ec6adc0a01db\") " Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.367063 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/137108f4-eec9-48ea-89ad-ec6adc0a01db-logs" (OuterVolumeSpecName: "logs") pod "137108f4-eec9-48ea-89ad-ec6adc0a01db" (UID: "137108f4-eec9-48ea-89ad-ec6adc0a01db"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.382394 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/137108f4-eec9-48ea-89ad-ec6adc0a01db-kube-api-access-snwz8" (OuterVolumeSpecName: "kube-api-access-snwz8") pod "137108f4-eec9-48ea-89ad-ec6adc0a01db" (UID: "137108f4-eec9-48ea-89ad-ec6adc0a01db"). InnerVolumeSpecName "kube-api-access-snwz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.412975 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "137108f4-eec9-48ea-89ad-ec6adc0a01db" (UID: "137108f4-eec9-48ea-89ad-ec6adc0a01db"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.420170 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-config-data" (OuterVolumeSpecName: "config-data") pod "137108f4-eec9-48ea-89ad-ec6adc0a01db" (UID: "137108f4-eec9-48ea-89ad-ec6adc0a01db"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.432799 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "137108f4-eec9-48ea-89ad-ec6adc0a01db" (UID: "137108f4-eec9-48ea-89ad-ec6adc0a01db"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.439566 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "137108f4-eec9-48ea-89ad-ec6adc0a01db" (UID: "137108f4-eec9-48ea-89ad-ec6adc0a01db"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.455617 4774 generic.go:334] "Generic (PLEG): container finished" podID="137108f4-eec9-48ea-89ad-ec6adc0a01db" containerID="dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61" exitCode=0 Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.455678 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"137108f4-eec9-48ea-89ad-ec6adc0a01db","Type":"ContainerDied","Data":"dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61"} Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.455715 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"137108f4-eec9-48ea-89ad-ec6adc0a01db","Type":"ContainerDied","Data":"f9761038ddc9e7c6231afef0c69d5639c988a2e1967e47200236ea0dfd253ef2"} Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.455715 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.455738 4774 scope.go:117] "RemoveContainer" containerID="dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.465690 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/137108f4-eec9-48ea-89ad-ec6adc0a01db-logs\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.465732 4774 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.465748 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snwz8\" (UniqueName: \"kubernetes.io/projected/137108f4-eec9-48ea-89ad-ec6adc0a01db-kube-api-access-snwz8\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.465759 4774 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.465768 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.465778 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/137108f4-eec9-48ea-89ad-ec6adc0a01db-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.521390 4774 scope.go:117] "RemoveContainer" containerID="2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.524900 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.542692 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.550583 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 21 14:27:34 crc kubenswrapper[4774]: E1121 14:27:34.551345 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="137108f4-eec9-48ea-89ad-ec6adc0a01db" containerName="nova-api-api" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.551376 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="137108f4-eec9-48ea-89ad-ec6adc0a01db" containerName="nova-api-api" Nov 21 14:27:34 crc kubenswrapper[4774]: E1121 14:27:34.551407 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="137108f4-eec9-48ea-89ad-ec6adc0a01db" containerName="nova-api-log" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.551418 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="137108f4-eec9-48ea-89ad-ec6adc0a01db" containerName="nova-api-log" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.551711 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="137108f4-eec9-48ea-89ad-ec6adc0a01db" containerName="nova-api-api" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.551736 4774 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="137108f4-eec9-48ea-89ad-ec6adc0a01db" containerName="nova-api-log" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.553497 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.558399 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.559259 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.560532 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.560887 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.585202 4774 scope.go:117] "RemoveContainer" containerID="dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61" Nov 21 14:27:34 crc kubenswrapper[4774]: E1121 14:27:34.586028 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61\": container with ID starting with dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61 not found: ID does not exist" containerID="dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.586103 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61"} err="failed to get container status \"dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61\": rpc error: code = NotFound desc = could not find container \"dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61\": container with ID starting with dc226276262dd4fe302a69c21c8386ae014e3d865b3603b55c19ccd400ec1e61 not found: ID does not exist" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.586143 4774 scope.go:117] "RemoveContainer" containerID="2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484" Nov 21 14:27:34 crc kubenswrapper[4774]: E1121 14:27:34.586451 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484\": container with ID starting with 2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484 not found: ID does not exist" containerID="2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.586482 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484"} err="failed to get container status \"2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484\": rpc error: code = NotFound desc = could not find container \"2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484\": container with ID starting with 2346b54352ed53da572c5ff41cd5ec655c66368032fc0976062687d6b29cd484 not found: ID does not exist" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.671107 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.671176 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-public-tls-certs\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.671209 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-config-data\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.671228 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-internal-tls-certs\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.671477 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612a4642-7af7-4d93-a27f-e63a0593a511-logs\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.671527 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klqkd\" (UniqueName: \"kubernetes.io/projected/612a4642-7af7-4d93-a27f-e63a0593a511-kube-api-access-klqkd\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.775507 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.775598 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-public-tls-certs\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.775643 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-config-data\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.775671 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-internal-tls-certs\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.775736 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612a4642-7af7-4d93-a27f-e63a0593a511-logs\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.775762 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klqkd\" (UniqueName: \"kubernetes.io/projected/612a4642-7af7-4d93-a27f-e63a0593a511-kube-api-access-klqkd\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.776725 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612a4642-7af7-4d93-a27f-e63a0593a511-logs\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.780093 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.781961 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-config-data\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.781956 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-public-tls-certs\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.785348 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-internal-tls-certs\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.796373 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klqkd\" (UniqueName: \"kubernetes.io/projected/612a4642-7af7-4d93-a27f-e63a0593a511-kube-api-access-klqkd\") pod \"nova-api-0\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " pod="openstack/nova-api-0" Nov 21 14:27:34 crc kubenswrapper[4774]: I1121 14:27:34.909688 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:27:35 crc kubenswrapper[4774]: I1121 14:27:35.726719 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:27:35 crc kubenswrapper[4774]: W1121 14:27:35.738005 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod612a4642_7af7_4d93_a27f_e63a0593a511.slice/crio-48bb40685a8dc40e39ac70ae809f434980afb59629dacaf9d97f1151a65ca685 WatchSource:0}: Error finding container 48bb40685a8dc40e39ac70ae809f434980afb59629dacaf9d97f1151a65ca685: Status 404 returned error can't find the container with id 48bb40685a8dc40e39ac70ae809f434980afb59629dacaf9d97f1151a65ca685 Nov 21 14:27:35 crc kubenswrapper[4774]: I1121 14:27:35.853053 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 14:27:35 crc kubenswrapper[4774]: I1121 14:27:35.854379 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 14:27:36 crc kubenswrapper[4774]: I1121 14:27:36.108623 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="137108f4-eec9-48ea-89ad-ec6adc0a01db" path="/var/lib/kubelet/pods/137108f4-eec9-48ea-89ad-ec6adc0a01db/volumes" Nov 21 14:27:36 crc kubenswrapper[4774]: I1121 14:27:36.501000 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"612a4642-7af7-4d93-a27f-e63a0593a511","Type":"ContainerStarted","Data":"581678da81a51c7400fc4c31a5574369fb2025f18fff099bc0af08132f13654f"} Nov 21 14:27:36 crc kubenswrapper[4774]: I1121 14:27:36.501508 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"612a4642-7af7-4d93-a27f-e63a0593a511","Type":"ContainerStarted","Data":"21b0a39b83253fe42307631f2014556d041f1359f887388fb2f8c11c1f9d769b"} Nov 21 14:27:36 crc kubenswrapper[4774]: I1121 14:27:36.501527 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"612a4642-7af7-4d93-a27f-e63a0593a511","Type":"ContainerStarted","Data":"48bb40685a8dc40e39ac70ae809f434980afb59629dacaf9d97f1151a65ca685"} Nov 21 14:27:36 crc kubenswrapper[4774]: I1121 14:27:36.533179 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.533153113 podStartE2EDuration="2.533153113s" podCreationTimestamp="2025-11-21 14:27:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:27:36.523919686 +0000 UTC m=+1447.176118945" watchObservedRunningTime="2025-11-21 14:27:36.533153113 +0000 UTC m=+1447.185352382" Nov 21 14:27:39 crc kubenswrapper[4774]: I1121 14:27:39.106304 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 21 14:27:39 crc kubenswrapper[4774]: I1121 14:27:39.145635 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 21 14:27:39 crc kubenswrapper[4774]: I1121 14:27:39.573686 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 21 14:27:40 crc kubenswrapper[4774]: I1121 14:27:40.848931 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 21 14:27:40 crc kubenswrapper[4774]: I1121 14:27:40.849904 4774 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 21 14:27:41 crc kubenswrapper[4774]: I1121 14:27:41.875186 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 14:27:41 crc kubenswrapper[4774]: I1121 14:27:41.875207 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 21 14:27:43 crc kubenswrapper[4774]: I1121 14:27:43.074792 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:43 crc kubenswrapper[4774]: I1121 14:27:43.133871 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:43 crc kubenswrapper[4774]: I1121 14:27:43.317809 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g6cng"] Nov 21 14:27:44 crc kubenswrapper[4774]: I1121 14:27:44.585289 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-g6cng" podUID="eaa5ff38-d544-4641-90cb-591db96ac40b" containerName="registry-server" containerID="cri-o://1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159" gracePeriod=2 Nov 21 14:27:44 crc kubenswrapper[4774]: I1121 14:27:44.910994 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 14:27:44 crc kubenswrapper[4774]: I1121 14:27:44.911491 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.145546 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.242548 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-catalog-content\") pod \"eaa5ff38-d544-4641-90cb-591db96ac40b\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.242726 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-utilities\") pod \"eaa5ff38-d544-4641-90cb-591db96ac40b\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.242977 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxlg8\" (UniqueName: \"kubernetes.io/projected/eaa5ff38-d544-4641-90cb-591db96ac40b-kube-api-access-zxlg8\") pod \"eaa5ff38-d544-4641-90cb-591db96ac40b\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.244426 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-utilities" (OuterVolumeSpecName: "utilities") pod "eaa5ff38-d544-4641-90cb-591db96ac40b" (UID: "eaa5ff38-d544-4641-90cb-591db96ac40b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.250632 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaa5ff38-d544-4641-90cb-591db96ac40b-kube-api-access-zxlg8" (OuterVolumeSpecName: "kube-api-access-zxlg8") pod "eaa5ff38-d544-4641-90cb-591db96ac40b" (UID: "eaa5ff38-d544-4641-90cb-591db96ac40b"). InnerVolumeSpecName "kube-api-access-zxlg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.345190 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eaa5ff38-d544-4641-90cb-591db96ac40b" (UID: "eaa5ff38-d544-4641-90cb-591db96ac40b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.346438 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-catalog-content\") pod \"eaa5ff38-d544-4641-90cb-591db96ac40b\" (UID: \"eaa5ff38-d544-4641-90cb-591db96ac40b\") " Nov 21 14:27:45 crc kubenswrapper[4774]: W1121 14:27:45.346602 4774 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/eaa5ff38-d544-4641-90cb-591db96ac40b/volumes/kubernetes.io~empty-dir/catalog-content Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.346658 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eaa5ff38-d544-4641-90cb-591db96ac40b" (UID: "eaa5ff38-d544-4641-90cb-591db96ac40b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.347592 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxlg8\" (UniqueName: \"kubernetes.io/projected/eaa5ff38-d544-4641-90cb-591db96ac40b-kube-api-access-zxlg8\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.347618 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.347637 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eaa5ff38-d544-4641-90cb-591db96ac40b-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.621239 4774 generic.go:334] "Generic (PLEG): container finished" podID="eaa5ff38-d544-4641-90cb-591db96ac40b" containerID="1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159" exitCode=0 Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.621316 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g6cng" event={"ID":"eaa5ff38-d544-4641-90cb-591db96ac40b","Type":"ContainerDied","Data":"1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159"} Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.621363 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-g6cng" event={"ID":"eaa5ff38-d544-4641-90cb-591db96ac40b","Type":"ContainerDied","Data":"919b14cd6d9111c8afa6d4130123b42a846d732cc51316b225425d53f5aaa162"} Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.621391 4774 scope.go:117] "RemoveContainer" containerID="1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.623460 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-g6cng" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.657858 4774 scope.go:117] "RemoveContainer" containerID="5bb32cb57b043f238752d5ea56b400f6cac761f27c79f9a2958aa35e84ff78bd" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.672723 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-g6cng"] Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.683252 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-g6cng"] Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.690597 4774 scope.go:117] "RemoveContainer" containerID="5628bc66ab2a3a571efaf4dc2552aac57ea30dd6fb0808d68f2a92289f85ad58" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.728562 4774 scope.go:117] "RemoveContainer" containerID="1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159" Nov 21 14:27:45 crc kubenswrapper[4774]: E1121 14:27:45.728892 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159\": container with ID starting with 1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159 not found: ID does not exist" containerID="1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.728934 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159"} err="failed to get container status \"1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159\": rpc error: code = NotFound desc = could not find container \"1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159\": container with ID starting with 1d069640cefe1e42aa3be771eb5a586c22b74ade7fc9e740abc56f6963816159 not found: ID does not exist" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.728965 4774 scope.go:117] "RemoveContainer" containerID="5bb32cb57b043f238752d5ea56b400f6cac761f27c79f9a2958aa35e84ff78bd" Nov 21 14:27:45 crc kubenswrapper[4774]: E1121 14:27:45.729365 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bb32cb57b043f238752d5ea56b400f6cac761f27c79f9a2958aa35e84ff78bd\": container with ID starting with 5bb32cb57b043f238752d5ea56b400f6cac761f27c79f9a2958aa35e84ff78bd not found: ID does not exist" containerID="5bb32cb57b043f238752d5ea56b400f6cac761f27c79f9a2958aa35e84ff78bd" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.729416 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bb32cb57b043f238752d5ea56b400f6cac761f27c79f9a2958aa35e84ff78bd"} err="failed to get container status \"5bb32cb57b043f238752d5ea56b400f6cac761f27c79f9a2958aa35e84ff78bd\": rpc error: code = NotFound desc = could not find container \"5bb32cb57b043f238752d5ea56b400f6cac761f27c79f9a2958aa35e84ff78bd\": container with ID starting with 5bb32cb57b043f238752d5ea56b400f6cac761f27c79f9a2958aa35e84ff78bd not found: ID does not exist" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.729451 4774 scope.go:117] "RemoveContainer" containerID="5628bc66ab2a3a571efaf4dc2552aac57ea30dd6fb0808d68f2a92289f85ad58" Nov 21 14:27:45 crc kubenswrapper[4774]: E1121 14:27:45.729726 4774 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"5628bc66ab2a3a571efaf4dc2552aac57ea30dd6fb0808d68f2a92289f85ad58\": container with ID starting with 5628bc66ab2a3a571efaf4dc2552aac57ea30dd6fb0808d68f2a92289f85ad58 not found: ID does not exist" containerID="5628bc66ab2a3a571efaf4dc2552aac57ea30dd6fb0808d68f2a92289f85ad58" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.729758 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5628bc66ab2a3a571efaf4dc2552aac57ea30dd6fb0808d68f2a92289f85ad58"} err="failed to get container status \"5628bc66ab2a3a571efaf4dc2552aac57ea30dd6fb0808d68f2a92289f85ad58\": rpc error: code = NotFound desc = could not find container \"5628bc66ab2a3a571efaf4dc2552aac57ea30dd6fb0808d68f2a92289f85ad58\": container with ID starting with 5628bc66ab2a3a571efaf4dc2552aac57ea30dd6fb0808d68f2a92289f85ad58 not found: ID does not exist" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.924026 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="612a4642-7af7-4d93-a27f-e63a0593a511" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 21 14:27:45 crc kubenswrapper[4774]: I1121 14:27:45.924070 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="612a4642-7af7-4d93-a27f-e63a0593a511" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Nov 21 14:27:46 crc kubenswrapper[4774]: I1121 14:27:46.105436 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaa5ff38-d544-4641-90cb-591db96ac40b" path="/var/lib/kubelet/pods/eaa5ff38-d544-4641-90cb-591db96ac40b/volumes" Nov 21 14:27:47 crc kubenswrapper[4774]: I1121 14:27:47.663446 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 21 14:27:50 crc kubenswrapper[4774]: I1121 14:27:50.854008 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 21 14:27:50 crc kubenswrapper[4774]: I1121 14:27:50.866963 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 21 14:27:50 crc kubenswrapper[4774]: I1121 14:27:50.868379 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 21 14:27:51 crc kubenswrapper[4774]: I1121 14:27:51.689388 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 21 14:27:54 crc kubenswrapper[4774]: I1121 14:27:54.916581 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 21 14:27:54 crc kubenswrapper[4774]: I1121 14:27:54.917738 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 21 14:27:54 crc kubenswrapper[4774]: I1121 14:27:54.917934 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 21 14:27:54 crc kubenswrapper[4774]: I1121 14:27:54.932170 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 21 14:27:55 crc kubenswrapper[4774]: I1121 14:27:55.727566 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-api-0" Nov 21 14:27:55 crc kubenswrapper[4774]: I1121 14:27:55.737749 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 21 14:28:12 crc kubenswrapper[4774]: I1121 14:28:12.773488 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Nov 21 14:28:12 crc kubenswrapper[4774]: I1121 14:28:12.774504 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="66b2a565-b48d-4b6f-8527-27326c13b522" containerName="openstackclient" containerID="cri-o://dfbedbce9f436ba2d67af7321bdcc2d58c19cd6c6808e9ddda88da0a09afd9af" gracePeriod=2 Nov 21 14:28:12 crc kubenswrapper[4774]: I1121 14:28:12.950931 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Nov 21 14:28:12 crc kubenswrapper[4774]: I1121 14:28:12.982373 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-ld98r"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.016605 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-sdw4x"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.016972 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-metrics-sdw4x" podUID="4549a9b5-fb19-4dae-9fee-b03d5d49e95d" containerName="openstack-network-exporter" containerID="cri-o://ae41437905bab4b8a42a6e934b47544bc731aad356664fbc208508fb4483c6af" gracePeriod=30 Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.038057 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-2sxpw"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.125010 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican6523-account-delete-t2fnw"] Nov 21 14:28:13 crc kubenswrapper[4774]: E1121 14:28:13.125661 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaa5ff38-d544-4641-90cb-591db96ac40b" containerName="extract-utilities" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.125678 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaa5ff38-d544-4641-90cb-591db96ac40b" containerName="extract-utilities" Nov 21 14:28:13 crc kubenswrapper[4774]: E1121 14:28:13.125694 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaa5ff38-d544-4641-90cb-591db96ac40b" containerName="registry-server" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.125701 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaa5ff38-d544-4641-90cb-591db96ac40b" containerName="registry-server" Nov 21 14:28:13 crc kubenswrapper[4774]: E1121 14:28:13.125726 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaa5ff38-d544-4641-90cb-591db96ac40b" containerName="extract-content" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.125733 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaa5ff38-d544-4641-90cb-591db96ac40b" containerName="extract-content" Nov 21 14:28:13 crc kubenswrapper[4774]: E1121 14:28:13.125754 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66b2a565-b48d-4b6f-8527-27326c13b522" containerName="openstackclient" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.125759 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="66b2a565-b48d-4b6f-8527-27326c13b522" containerName="openstackclient" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.126004 4774 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="66b2a565-b48d-4b6f-8527-27326c13b522" containerName="openstackclient" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.126021 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaa5ff38-d544-4641-90cb-591db96ac40b" containerName="registry-server" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.128725 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican6523-account-delete-t2fnw" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.139262 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.139649 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerName="ovn-northd" containerID="cri-o://382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9" gracePeriod=30 Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.139964 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-northd-0" podUID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerName="openstack-network-exporter" containerID="cri-o://d1073de69bf390fe30269d7b088a8b3fbfa034bdc8ef77499fb5ba4f9878eef7" gracePeriod=30 Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.193662 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder5822-account-delete-7mbkj"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.215807 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican6523-account-delete-t2fnw"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.215958 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder5822-account-delete-7mbkj" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.248932 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder5822-account-delete-7mbkj"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.300267 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts\") pod \"barbican6523-account-delete-t2fnw\" (UID: \"79476096-5d34-4e8a-9f33-3127bacf4e60\") " pod="openstack/barbican6523-account-delete-t2fnw" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.300671 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmhmm\" (UniqueName: \"kubernetes.io/projected/79476096-5d34-4e8a-9f33-3127bacf4e60-kube-api-access-rmhmm\") pod \"barbican6523-account-delete-t2fnw\" (UID: \"79476096-5d34-4e8a-9f33-3127bacf4e60\") " pod="openstack/barbican6523-account-delete-t2fnw" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.327897 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.349152 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement2802-account-delete-ltg2g"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.351143 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement2802-account-delete-ltg2g" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.382378 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement2802-account-delete-ltg2g"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.417905 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance128f-account-delete-w4g2r"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.419891 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance128f-account-delete-w4g2r" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.422982 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts\") pod \"barbican6523-account-delete-t2fnw\" (UID: \"79476096-5d34-4e8a-9f33-3127bacf4e60\") " pod="openstack/barbican6523-account-delete-t2fnw" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.423341 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7a1181b-900b-40dc-9855-795653215df3-operator-scripts\") pod \"cinder5822-account-delete-7mbkj\" (UID: \"f7a1181b-900b-40dc-9855-795653215df3\") " pod="openstack/cinder5822-account-delete-7mbkj" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.423406 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmhmm\" (UniqueName: \"kubernetes.io/projected/79476096-5d34-4e8a-9f33-3127bacf4e60-kube-api-access-rmhmm\") pod \"barbican6523-account-delete-t2fnw\" (UID: \"79476096-5d34-4e8a-9f33-3127bacf4e60\") " pod="openstack/barbican6523-account-delete-t2fnw" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.423676 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdgw5\" (UniqueName: \"kubernetes.io/projected/f7a1181b-900b-40dc-9855-795653215df3-kube-api-access-kdgw5\") pod \"cinder5822-account-delete-7mbkj\" (UID: \"f7a1181b-900b-40dc-9855-795653215df3\") " pod="openstack/cinder5822-account-delete-7mbkj" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.427847 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts\") pod \"barbican6523-account-delete-t2fnw\" (UID: \"79476096-5d34-4e8a-9f33-3127bacf4e60\") " pod="openstack/barbican6523-account-delete-t2fnw" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.437222 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance128f-account-delete-w4g2r"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.477408 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-97g4j"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.491239 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-97g4j"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.515045 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-h86r9"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.526090 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7a1181b-900b-40dc-9855-795653215df3-operator-scripts\") 
pod \"cinder5822-account-delete-7mbkj\" (UID: \"f7a1181b-900b-40dc-9855-795653215df3\") " pod="openstack/cinder5822-account-delete-7mbkj" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.526210 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s47cc\" (UniqueName: \"kubernetes.io/projected/2bf5bbb4-9ebb-41b9-a888-4144660d088c-kube-api-access-s47cc\") pod \"placement2802-account-delete-ltg2g\" (UID: \"2bf5bbb4-9ebb-41b9-a888-4144660d088c\") " pod="openstack/placement2802-account-delete-ltg2g" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.526256 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bf5bbb4-9ebb-41b9-a888-4144660d088c-operator-scripts\") pod \"placement2802-account-delete-ltg2g\" (UID: \"2bf5bbb4-9ebb-41b9-a888-4144660d088c\") " pod="openstack/placement2802-account-delete-ltg2g" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.526333 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdgw5\" (UniqueName: \"kubernetes.io/projected/f7a1181b-900b-40dc-9855-795653215df3-kube-api-access-kdgw5\") pod \"cinder5822-account-delete-7mbkj\" (UID: \"f7a1181b-900b-40dc-9855-795653215df3\") " pod="openstack/cinder5822-account-delete-7mbkj" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.526929 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmhmm\" (UniqueName: \"kubernetes.io/projected/79476096-5d34-4e8a-9f33-3127bacf4e60-kube-api-access-rmhmm\") pod \"barbican6523-account-delete-t2fnw\" (UID: \"79476096-5d34-4e8a-9f33-3127bacf4e60\") " pod="openstack/barbican6523-account-delete-t2fnw" Nov 21 14:28:13 crc kubenswrapper[4774]: E1121 14:28:13.526939 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 21 14:28:13 crc kubenswrapper[4774]: E1121 14:28:13.527077 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data podName:64e33a39-c371-477f-b1c9-d58189db4bc8 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:14.027056159 +0000 UTC m=+1484.679255408 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data") pod "rabbitmq-cell1-server-0" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8") : configmap "rabbitmq-cell1-config-data" not found Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.532932 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7a1181b-900b-40dc-9855-795653215df3-operator-scripts\") pod \"cinder5822-account-delete-7mbkj\" (UID: \"f7a1181b-900b-40dc-9855-795653215df3\") " pod="openstack/cinder5822-account-delete-7mbkj" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.629235 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s47cc\" (UniqueName: \"kubernetes.io/projected/2bf5bbb4-9ebb-41b9-a888-4144660d088c-kube-api-access-s47cc\") pod \"placement2802-account-delete-ltg2g\" (UID: \"2bf5bbb4-9ebb-41b9-a888-4144660d088c\") " pod="openstack/placement2802-account-delete-ltg2g" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.634403 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bf5bbb4-9ebb-41b9-a888-4144660d088c-operator-scripts\") pod \"placement2802-account-delete-ltg2g\" (UID: \"2bf5bbb4-9ebb-41b9-a888-4144660d088c\") " pod="openstack/placement2802-account-delete-ltg2g" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.634879 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a3ae90b-73bb-4fbf-887b-c6e432338502-operator-scripts\") pod \"glance128f-account-delete-w4g2r\" (UID: \"7a3ae90b-73bb-4fbf-887b-c6e432338502\") " pod="openstack/glance128f-account-delete-w4g2r" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.634993 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mc44z\" (UniqueName: \"kubernetes.io/projected/7a3ae90b-73bb-4fbf-887b-c6e432338502-kube-api-access-mc44z\") pod \"glance128f-account-delete-w4g2r\" (UID: \"7a3ae90b-73bb-4fbf-887b-c6e432338502\") " pod="openstack/glance128f-account-delete-w4g2r" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.633828 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-h86r9"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.636605 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bf5bbb4-9ebb-41b9-a888-4144660d088c-operator-scripts\") pod \"placement2802-account-delete-ltg2g\" (UID: \"2bf5bbb4-9ebb-41b9-a888-4144660d088c\") " pod="openstack/placement2802-account-delete-ltg2g" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.637868 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdgw5\" (UniqueName: \"kubernetes.io/projected/f7a1181b-900b-40dc-9855-795653215df3-kube-api-access-kdgw5\") pod \"cinder5822-account-delete-7mbkj\" (UID: \"f7a1181b-900b-40dc-9855-795653215df3\") " pod="openstack/cinder5822-account-delete-7mbkj" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.664888 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s47cc\" (UniqueName: \"kubernetes.io/projected/2bf5bbb4-9ebb-41b9-a888-4144660d088c-kube-api-access-s47cc\") pod 
\"placement2802-account-delete-ltg2g\" (UID: \"2bf5bbb4-9ebb-41b9-a888-4144660d088c\") " pod="openstack/placement2802-account-delete-ltg2g" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.675632 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutronfb24-account-delete-z2nw8"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.677835 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutronfb24-account-delete-z2nw8" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.703183 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican6523-account-delete-t2fnw" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.728366 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder5822-account-delete-7mbkj" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.737404 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mc44z\" (UniqueName: \"kubernetes.io/projected/7a3ae90b-73bb-4fbf-887b-c6e432338502-kube-api-access-mc44z\") pod \"glance128f-account-delete-w4g2r\" (UID: \"7a3ae90b-73bb-4fbf-887b-c6e432338502\") " pod="openstack/glance128f-account-delete-w4g2r" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.737642 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a3ae90b-73bb-4fbf-887b-c6e432338502-operator-scripts\") pod \"glance128f-account-delete-w4g2r\" (UID: \"7a3ae90b-73bb-4fbf-887b-c6e432338502\") " pod="openstack/glance128f-account-delete-w4g2r" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.738603 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a3ae90b-73bb-4fbf-887b-c6e432338502-operator-scripts\") pod \"glance128f-account-delete-w4g2r\" (UID: \"7a3ae90b-73bb-4fbf-887b-c6e432338502\") " pod="openstack/glance128f-account-delete-w4g2r" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.769987 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement2802-account-delete-ltg2g" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.773123 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutronfb24-account-delete-z2nw8"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.774426 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mc44z\" (UniqueName: \"kubernetes.io/projected/7a3ae90b-73bb-4fbf-887b-c6e432338502-kube-api-access-mc44z\") pod \"glance128f-account-delete-w4g2r\" (UID: \"7a3ae90b-73bb-4fbf-887b-c6e432338502\") " pod="openstack/glance128f-account-delete-w4g2r" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.836959 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance128f-account-delete-w4g2r" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.839922 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-85xdf"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.841478 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/650c7a92-1469-4a9c-9a60-a846fe7ed823-operator-scripts\") pod \"neutronfb24-account-delete-z2nw8\" (UID: \"650c7a92-1469-4a9c-9a60-a846fe7ed823\") " pod="openstack/neutronfb24-account-delete-z2nw8" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.841573 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4fbc\" (UniqueName: \"kubernetes.io/projected/650c7a92-1469-4a9c-9a60-a846fe7ed823-kube-api-access-v4fbc\") pod \"neutronfb24-account-delete-z2nw8\" (UID: \"650c7a92-1469-4a9c-9a60-a846fe7ed823\") " pod="openstack/neutronfb24-account-delete-z2nw8" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.867920 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-85xdf"] Nov 21 14:28:13 crc kubenswrapper[4774]: E1121 14:28:13.878663 4774 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack/ovn-controller-2sxpw" message=< Nov 21 14:28:13 crc kubenswrapper[4774]: Exiting ovn-controller (1) [ OK ] Nov 21 14:28:13 crc kubenswrapper[4774]: > Nov 21 14:28:13 crc kubenswrapper[4774]: E1121 14:28:13.878725 4774 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack/ovn-controller-2sxpw" podUID="7ee04f12-987f-4f31-81b3-10cd067af310" containerName="ovn-controller" containerID="cri-o://5835b1e71040c97609879beff0fc752dee4bbaaacdb26af845a02a7a42242f5c" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.878780 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-2sxpw" podUID="7ee04f12-987f-4f31-81b3-10cd067af310" containerName="ovn-controller" containerID="cri-o://5835b1e71040c97609879beff0fc752dee4bbaaacdb26af845a02a7a42242f5c" gracePeriod=30 Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.885534 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.886012 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="0563658a-f1e8-4cae-b165-9697c4673895" containerName="openstack-network-exporter" containerID="cri-o://2c34ab166ca72aebd1fd6aa1a5cc31cb68ddf856a803c3f00aa0f1b318e937ff" gracePeriod=300 Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.910176 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/novaapi242e-account-delete-tbszv"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.912000 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi242e-account-delete-tbszv" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.933906 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi242e-account-delete-tbszv"] Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.947442 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/650c7a92-1469-4a9c-9a60-a846fe7ed823-operator-scripts\") pod \"neutronfb24-account-delete-z2nw8\" (UID: \"650c7a92-1469-4a9c-9a60-a846fe7ed823\") " pod="openstack/neutronfb24-account-delete-z2nw8" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.947564 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4fbc\" (UniqueName: \"kubernetes.io/projected/650c7a92-1469-4a9c-9a60-a846fe7ed823-kube-api-access-v4fbc\") pod \"neutronfb24-account-delete-z2nw8\" (UID: \"650c7a92-1469-4a9c-9a60-a846fe7ed823\") " pod="openstack/neutronfb24-account-delete-z2nw8" Nov 21 14:28:13 crc kubenswrapper[4774]: I1121 14:28:13.948572 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/650c7a92-1469-4a9c-9a60-a846fe7ed823-operator-scripts\") pod \"neutronfb24-account-delete-z2nw8\" (UID: \"650c7a92-1469-4a9c-9a60-a846fe7ed823\") " pod="openstack/neutronfb24-account-delete-z2nw8" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.006166 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-6wkpn"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.006806 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4fbc\" (UniqueName: \"kubernetes.io/projected/650c7a92-1469-4a9c-9a60-a846fe7ed823-kube-api-access-v4fbc\") pod \"neutronfb24-account-delete-z2nw8\" (UID: \"650c7a92-1469-4a9c-9a60-a846fe7ed823\") " pod="openstack/neutronfb24-account-delete-z2nw8" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.046508 4774 generic.go:334] "Generic (PLEG): container finished" podID="7ee04f12-987f-4f31-81b3-10cd067af310" containerID="5835b1e71040c97609879beff0fc752dee4bbaaacdb26af845a02a7a42242f5c" exitCode=0 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.046670 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sxpw" event={"ID":"7ee04f12-987f-4f31-81b3-10cd067af310","Type":"ContainerDied","Data":"5835b1e71040c97609879beff0fc752dee4bbaaacdb26af845a02a7a42242f5c"} Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.048641 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-6wkpn"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.122745 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutronfb24-account-delete-z2nw8" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.174877 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-sdw4x_4549a9b5-fb19-4dae-9fee-b03d5d49e95d/openstack-network-exporter/0.log" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.175254 4774 generic.go:334] "Generic (PLEG): container finished" podID="4549a9b5-fb19-4dae-9fee-b03d5d49e95d" containerID="ae41437905bab4b8a42a6e934b47544bc731aad356664fbc208508fb4483c6af" exitCode=2 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.221724 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts\") pod \"novaapi242e-account-delete-tbszv\" (UID: \"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad\") " pod="openstack/novaapi242e-account-delete-tbszv" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.221987 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4m9k\" (UniqueName: \"kubernetes.io/projected/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-kube-api-access-r4m9k\") pod \"novaapi242e-account-delete-tbszv\" (UID: \"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad\") " pod="openstack/novaapi242e-account-delete-tbszv" Nov 21 14:28:14 crc kubenswrapper[4774]: E1121 14:28:14.222281 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 21 14:28:14 crc kubenswrapper[4774]: E1121 14:28:14.222352 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data podName:64e33a39-c371-477f-b1c9-d58189db4bc8 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:15.222326818 +0000 UTC m=+1485.874526077 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data") pod "rabbitmq-cell1-server-0" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8") : configmap "rabbitmq-cell1-config-data" not found Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.226252 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-sb-0" podUID="0563658a-f1e8-4cae-b165-9697c4673895" containerName="ovsdbserver-sb" containerID="cri-o://6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1" gracePeriod=300 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.343919 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts\") pod \"novaapi242e-account-delete-tbszv\" (UID: \"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad\") " pod="openstack/novaapi242e-account-delete-tbszv" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.344066 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4m9k\" (UniqueName: \"kubernetes.io/projected/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-kube-api-access-r4m9k\") pod \"novaapi242e-account-delete-tbszv\" (UID: \"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad\") " pod="openstack/novaapi242e-account-delete-tbszv" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.372770 4774 generic.go:334] "Generic (PLEG): container finished" podID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerID="d1073de69bf390fe30269d7b088a8b3fbfa034bdc8ef77499fb5ba4f9878eef7" exitCode=2 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.377310 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts\") pod \"novaapi242e-account-delete-tbszv\" (UID: \"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad\") " pod="openstack/novaapi242e-account-delete-tbszv" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.389484 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92400548-ccdd-4e2a-9da5-3aeef0628e31" path="/var/lib/kubelet/pods/92400548-ccdd-4e2a-9da5-3aeef0628e31/volumes" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.394222 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6996afa-3f45-411b-ac41-acf012c9c45e" path="/var/lib/kubelet/pods/a6996afa-3f45-411b-ac41-acf012c9c45e/volumes" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.394966 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc5f9b69-3714-4aee-8d39-1618184dbb91" path="/var/lib/kubelet/pods/dc5f9b69-3714-4aee-8d39-1618184dbb91/volumes" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.404239 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3386949-a3f5-453c-953e-8deedb418d28" path="/var/lib/kubelet/pods/e3386949-a3f5-453c-953e-8deedb418d28/volumes" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.405325 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-sdw4x" event={"ID":"4549a9b5-fb19-4dae-9fee-b03d5d49e95d","Type":"ContainerDied","Data":"ae41437905bab4b8a42a6e934b47544bc731aad356664fbc208508fb4483c6af"} Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.405371 4774 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/novacell03102-account-delete-pwxhc"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.406684 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4c16af5b-77af-4097-ad41-42aaa0aac4a1","Type":"ContainerDied","Data":"d1073de69bf390fe30269d7b088a8b3fbfa034bdc8ef77499fb5ba4f9878eef7"} Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.406720 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell03102-account-delete-pwxhc"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.406736 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6868d89965-nrgpl"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.407087 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-287qb"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.407185 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell03102-account-delete-pwxhc" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.409163 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" podUID="f89a7785-0a49-4c28-a587-ec113d2f3635" containerName="dnsmasq-dns" containerID="cri-o://1d66c74501f2c450d7d07143b74c86af7aad69e0eb9bc0e1631a18ed7cbe8937" gracePeriod=10 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.440150 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4m9k\" (UniqueName: \"kubernetes.io/projected/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-kube-api-access-r4m9k\") pod \"novaapi242e-account-delete-tbszv\" (UID: \"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad\") " pod="openstack/novaapi242e-account-delete-tbszv" Nov 21 14:28:14 crc kubenswrapper[4774]: E1121 14:28:14.443752 4774 log.go:32] "ExecSync cmd from runtime service failed" err=< Nov 21 14:28:14 crc kubenswrapper[4774]: rpc error: code = Unknown desc = command error: setns `mnt`: Bad file descriptor Nov 21 14:28:14 crc kubenswrapper[4774]: fail startup Nov 21 14:28:14 crc kubenswrapper[4774]: , stdout: , stderr: , exit code -1 Nov 21 14:28:14 crc kubenswrapper[4774]: > containerID="6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 21 14:28:14 crc kubenswrapper[4774]: E1121 14:28:14.447085 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1 is running failed: container process not found" containerID="6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 21 14:28:14 crc kubenswrapper[4774]: E1121 14:28:14.456456 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1 is running failed: container process not found" containerID="6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 21 14:28:14 crc kubenswrapper[4774]: E1121 14:28:14.456551 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1 is running failed: container process not found" 
probeType="Readiness" pod="openstack/ovsdbserver-sb-0" podUID="0563658a-f1e8-4cae-b165-9697c4673895" containerName="ovsdbserver-sb" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.502184 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-287qb"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.542323 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.545094 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" podUID="f89a7785-0a49-4c28-a587-ec113d2f3635" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.197:5353: connect: connection refused" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.558205 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-rtvlq"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.560975 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3311b6bd-a19b-402c-afe4-22222098c669-operator-scripts\") pod \"novacell03102-account-delete-pwxhc\" (UID: \"3311b6bd-a19b-402c-afe4-22222098c669\") " pod="openstack/novacell03102-account-delete-pwxhc" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.561049 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf2lt\" (UniqueName: \"kubernetes.io/projected/3311b6bd-a19b-402c-afe4-22222098c669-kube-api-access-jf2lt\") pod \"novacell03102-account-delete-pwxhc\" (UID: \"3311b6bd-a19b-402c-afe4-22222098c669\") " pod="openstack/novacell03102-account-delete-pwxhc" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.611462 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-rtvlq"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.617924 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi242e-account-delete-tbszv" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.656543 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.657478 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="db7f3cb4-269e-443e-836e-caae1c2d122f" containerName="openstack-network-exporter" containerID="cri-o://f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79" gracePeriod=300 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.679170 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.679475 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="7bf981c0-8ff6-493c-a5fc-14610df3b362" containerName="cinder-api-log" containerID="cri-o://400661145f174c0b9169da8eed8077bb5592eced771c3239659552c524ba7eb9" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.679558 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3311b6bd-a19b-402c-afe4-22222098c669-operator-scripts\") pod \"novacell03102-account-delete-pwxhc\" (UID: \"3311b6bd-a19b-402c-afe4-22222098c669\") " pod="openstack/novacell03102-account-delete-pwxhc" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.679622 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jf2lt\" (UniqueName: \"kubernetes.io/projected/3311b6bd-a19b-402c-afe4-22222098c669-kube-api-access-jf2lt\") pod \"novacell03102-account-delete-pwxhc\" (UID: \"3311b6bd-a19b-402c-afe4-22222098c669\") " pod="openstack/novacell03102-account-delete-pwxhc" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.679784 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="7bf981c0-8ff6-493c-a5fc-14610df3b362" containerName="cinder-api" containerID="cri-o://038ee870a823946f43bd1d652272038621a6567ad96155f489796343f86963d7" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.680838 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3311b6bd-a19b-402c-afe4-22222098c669-operator-scripts\") pod \"novacell03102-account-delete-pwxhc\" (UID: \"3311b6bd-a19b-402c-afe4-22222098c669\") " pod="openstack/novacell03102-account-delete-pwxhc" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.719876 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.720278 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="75187d0f-77b0-45ee-a452-1850f0fe7851" containerName="cinder-scheduler" containerID="cri-o://cfabf58a3660a117c2e4bc8be1a895c8cb8999d2d918f92bec39bbee7161485e" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.720791 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="75187d0f-77b0-45ee-a452-1850f0fe7851" containerName="probe" containerID="cri-o://f284772158aa9afb2ac683ea5db800eb76ca8ee198f42ac67c00afb5d059483f" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: E1121 
14:28:14.741048 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 21 14:28:14 crc kubenswrapper[4774]: E1121 14:28:14.745340 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.752902 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-8d9694746-ctlgk"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.753314 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-8d9694746-ctlgk" podUID="204761da-3cd3-4024-8268-2c4ade77be70" containerName="placement-log" containerID="cri-o://e1bece3865eab576d0537b2757e1996d3eb563738ed1cbdb0bbc09abd23a1ae4" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.753506 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-8d9694746-ctlgk" podUID="204761da-3cd3-4024-8268-2c4ade77be70" containerName="placement-api" containerID="cri-o://cc1d60dd83d00832b380eb3c950ba9940eb8e75dc9cfe60f03f0990330129de2" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: E1121 14:28:14.753750 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Nov 21 14:28:14 crc kubenswrapper[4774]: E1121 14:28:14.753855 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerName="ovn-northd" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.780903 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkrvw"] Nov 21 14:28:14 crc kubenswrapper[4774]: E1121 14:28:14.783288 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 21 14:28:14 crc kubenswrapper[4774]: E1121 14:28:14.783362 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data podName:e2685b76-2150-4209-a55b-a989ae40b7db nodeName:}" failed. No retries permitted until 2025-11-21 14:28:15.283339335 +0000 UTC m=+1485.935538594 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data") pod "rabbitmq-server-0" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db") : configmap "rabbitmq-config-data" not found Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.810453 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkrvw"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.841007 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-ggjhq"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.850690 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-ggjhq"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.862847 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.868064 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="36597581-6c3f-42a7-98ba-155d3bb19320" containerName="glance-log" containerID="cri-o://cf5874a27369c23eb02a38e945173e313d37d5eea273de5528c6e3f4c20042c1" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.869245 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="36597581-6c3f-42a7-98ba-155d3bb19320" containerName="glance-httpd" containerID="cri-o://c3d2080c4d1517a927737cdfe470200b33ed1dfc064dd0c21a2afa217e1ea935" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.875866 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jf2lt\" (UniqueName: \"kubernetes.io/projected/3311b6bd-a19b-402c-afe4-22222098c669-kube-api-access-jf2lt\") pod \"novacell03102-account-delete-pwxhc\" (UID: \"3311b6bd-a19b-402c-afe4-22222098c669\") " pod="openstack/novacell03102-account-delete-pwxhc" Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.916792 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.917280 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-server" containerID="cri-o://9bc27234572696e44f557a383a86c888ed805788bafa91dd14bb78cdefab3b32" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.921540 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-updater" containerID="cri-o://53a824add7ac0cc57042d70c06b911c7e6a34e1c2010603ee4d6fbc3ed438924" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.921655 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="swift-recon-cron" containerID="cri-o://01b0a87790fffb8562d8320c5dbbbc5a07eb54a2e1277dfed78d3269edb2bee5" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.921705 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="rsync" 
containerID="cri-o://89801dafc1d6b54a7d5db86bdd9ef9aa021a679876daee4fe43e50ca59175ae5" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.921751 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-expirer" containerID="cri-o://2dce44da6f6202c7964d5937a707b66b8c0555f55b1d955191986f35ef80726a" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.921790 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-updater" containerID="cri-o://834a28d7bd2427951828771181afd8a938666b3888becfb912c78842574fb9ae" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.921868 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-auditor" containerID="cri-o://cd578a4be6466cb961f25126446b01ae08dfe77292401d7b6dc5269637ee2e33" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.921928 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-replicator" containerID="cri-o://1c4505cc2138852bc1de85d4a3368df20df9e8fc72c0b4f0a772d89a565a9d5c" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.921973 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-server" containerID="cri-o://4a13a7da01eb78f1caaf1ffc112b4e611dc9d20280166283d224a8d79da6a2d5" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.922058 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-reaper" containerID="cri-o://78cf57423bdba8a0adb0930011b88d9283fb739e1b67a73287f7ff3ca582a4a1" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.922113 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-auditor" containerID="cri-o://51b9f59856cff7bf6c2e7c193206a014c3b1c1b6ff7e65f0ffe94ef9fbaf701e" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.922152 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-replicator" containerID="cri-o://b0ff7c749c18817ad064c15649c712f4e89466819f6dd77e940ca84ed95e90a8" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.922184 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-server" containerID="cri-o://b02da81747033bd11ee8ad86892e420553f3c4e14b394a17b83ad199bf283c8e" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.922234 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-replicator" 
containerID="cri-o://d3b647fd9ca3744848c9bba9996b244e70638e808df6e12566f545983a15f3cc" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.922272 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-storage-0" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-auditor" containerID="cri-o://4a079344642c9fb3a26394a82468d59daabece732a5466662ee8aeaa883a5bb3" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.956418 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.956886 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="29fd4802-19c7-4e11-b776-c505c03206b0" containerName="glance-log" containerID="cri-o://66249ecbe2c3348c6acd48e9804c896c943d2119544945a4641b3cd22603525d" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.957631 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="29fd4802-19c7-4e11-b776-c505c03206b0" containerName="glance-httpd" containerID="cri-o://08dcf92110aca28bb33e09d2cf80555b027cc58cf28e0ba6099d79517b3e3e96" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.981026 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-74459fb479-fkm77"] Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.981395 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-74459fb479-fkm77" podUID="98c89c8e-6557-46b4-adf8-f954dfff68b3" containerName="barbican-worker-log" containerID="cri-o://571c60de19b673d0a2cf6499c4d0ad765e15a4171ae78a3dcb8552bb3605e8e6" gracePeriod=30 Nov 21 14:28:14 crc kubenswrapper[4774]: I1121 14:28:14.981564 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-74459fb479-fkm77" podUID="98c89c8e-6557-46b4-adf8-f954dfff68b3" containerName="barbican-worker" containerID="cri-o://123168316f49f1c892f63242e19f929cf760d9f3f3bdcc32a34469541b54b183" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.005135 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5546774f69-cpnh7"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.005832 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5546774f69-cpnh7" podUID="d7a5f9e1-9167-418e-8e1e-57e645d31785" containerName="neutron-api" containerID="cri-o://eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.006462 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5546774f69-cpnh7" podUID="d7a5f9e1-9167-418e-8e1e-57e645d31785" containerName="neutron-httpd" containerID="cri-o://1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.066013 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-866df86b64-5t8kn"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.066436 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" podUID="8057ad05-b8c9-4742-a0e2-388f0a901595" 
containerName="barbican-keystone-listener-log" containerID="cri-o://1bed157d3f1b09ec22281912c29b9fe8e5b372b41ebbf607b1b08a4791141c7e" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.066622 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" podUID="8057ad05-b8c9-4742-a0e2-388f0a901595" containerName="barbican-keystone-listener" containerID="cri-o://72732cc3c72816545f8f6bd38e3894a25c914501104260c3cdc2219287bc3e97" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.086366 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovsdbserver-nb-0" podUID="db7f3cb4-269e-443e-836e-caae1c2d122f" containerName="ovsdbserver-nb" containerID="cri-o://ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2" gracePeriod=300 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.094161 4774 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/swift-proxy-748c4cc85c-dkrhb" secret="" err="secret \"swift-swift-dockercfg-4q68d\" not found" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.106074 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.134257 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.134609 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-log" containerID="cri-o://1c051875890cc87d20e2ccc60014cd64e0c54c66081134927ca3c7218c65fef2" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.135332 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-metadata" containerID="cri-o://f794e1beb2a4d2e0aa4f9c55c4bf3c19e6f4475d6330263d426714add8939453" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.152586 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-zgkzk"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.169786 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-zgkzk"] Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.198628 4774 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.198663 4774 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.198679 4774 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-proxy-748c4cc85c-dkrhb: [secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.198753 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift podName:4dd9e6d7-d0b1-49f3-920a-34e434835bfa nodeName:}" failed. No retries permitted until 2025-11-21 14:28:15.698727829 +0000 UTC m=+1486.350927088 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift") pod "swift-proxy-748c4cc85c-dkrhb" (UID: "4dd9e6d7-d0b1-49f3-920a-34e434835bfa") : [secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.240330 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-576b48cd9b-wr2q7"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.240950 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-576b48cd9b-wr2q7" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerName="barbican-api-log" containerID="cri-o://95fb4afd0377bbc0f0df432194dc5a7490303bfd572fe17af159b5bcfaffa8dd" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.241506 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-576b48cd9b-wr2q7" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerName="barbican-api" containerID="cri-o://1a0a15cfd145eaf76485365a7148d16577e73bc0add7da74f8b15ec9b79a5303" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.285795 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.286189 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="612a4642-7af7-4d93-a27f-e63a0593a511" containerName="nova-api-log" containerID="cri-o://21b0a39b83253fe42307631f2014556d041f1359f887388fb2f8c11c1f9d769b" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.286989 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="612a4642-7af7-4d93-a27f-e63a0593a511" containerName="nova-api-api" containerID="cri-o://581678da81a51c7400fc4c31a5574369fb2025f18fff099bc0af08132f13654f" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.300304 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.300365 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data podName:64e33a39-c371-477f-b1c9-d58189db4bc8 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:17.300349958 +0000 UTC m=+1487.952549217 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data") pod "rabbitmq-cell1-server-0" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8") : configmap "rabbitmq-cell1-config-data" not found Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.301367 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.301406 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data podName:e2685b76-2150-4209-a55b-a989ae40b7db nodeName:}" failed. No retries permitted until 2025-11-21 14:28:16.301398668 +0000 UTC m=+1486.953597917 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data") pod "rabbitmq-server-0" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db") : configmap "rabbitmq-config-data" not found Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.315441 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-a40a-account-create-bmj2k"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.354344 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-a40a-account-create-bmj2k"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.371691 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovs-vswitchd" containerID="cri-o://b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" gracePeriod=28 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.428519 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.474521 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.474953 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="e0f5811f-60f6-4820-b981-715448365e52" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://36dd18acf6da72cf687d626808b4dda1668438a188a9f1018c121f0a0c64d299" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.483193 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.483610 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="ad456e3b-04a1-48d6-8fbc-39e3faa00aa0" containerName="nova-scheduler-scheduler" containerID="cri-o://3ab69fee82d7e8cb78023f292bf75b19ed476e76aa600827e67cc6b39f135018" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.639972 4774 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Nov 21 14:28:15 crc kubenswrapper[4774]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Nov 21 14:28:15 crc kubenswrapper[4774]: + source /usr/local/bin/container-scripts/functions Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNBridge=br-int Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNRemote=tcp:localhost:6642 Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNEncapType=geneve Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNAvailabilityZones= Nov 21 14:28:15 crc kubenswrapper[4774]: ++ EnableChassisAsGateway=true Nov 21 14:28:15 crc kubenswrapper[4774]: ++ PhysicalNetworks= Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNHostName= Nov 21 14:28:15 crc kubenswrapper[4774]: ++ DB_FILE=/etc/openvswitch/conf.db Nov 21 14:28:15 crc kubenswrapper[4774]: ++ ovs_dir=/var/lib/openvswitch Nov 21 14:28:15 crc kubenswrapper[4774]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Nov 21 14:28:15 crc kubenswrapper[4774]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Nov 21 14:28:15 crc kubenswrapper[4774]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 21 14:28:15 crc 
kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + sleep 0.5 Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + sleep 0.5 Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + sleep 0.5 Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + sleep 0.5 Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + cleanup_ovsdb_server_semaphore Nov 21 14:28:15 crc kubenswrapper[4774]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 21 14:28:15 crc kubenswrapper[4774]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Nov 21 14:28:15 crc kubenswrapper[4774]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack/ovn-controller-ovs-ld98r" message=< Nov 21 14:28:15 crc kubenswrapper[4774]: Exiting ovsdb-server (5) [ OK ] Nov 21 14:28:15 crc kubenswrapper[4774]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Nov 21 14:28:15 crc kubenswrapper[4774]: + source /usr/local/bin/container-scripts/functions Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNBridge=br-int Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNRemote=tcp:localhost:6642 Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNEncapType=geneve Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNAvailabilityZones= Nov 21 14:28:15 crc kubenswrapper[4774]: ++ EnableChassisAsGateway=true Nov 21 14:28:15 crc kubenswrapper[4774]: ++ PhysicalNetworks= Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNHostName= Nov 21 14:28:15 crc kubenswrapper[4774]: ++ DB_FILE=/etc/openvswitch/conf.db Nov 21 14:28:15 crc kubenswrapper[4774]: ++ ovs_dir=/var/lib/openvswitch Nov 21 14:28:15 crc kubenswrapper[4774]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Nov 21 14:28:15 crc kubenswrapper[4774]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Nov 21 14:28:15 crc kubenswrapper[4774]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + sleep 0.5 Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + sleep 0.5 Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + sleep 0.5 Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + sleep 0.5 Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + cleanup_ovsdb_server_semaphore Nov 21 14:28:15 crc kubenswrapper[4774]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 21 14:28:15 crc kubenswrapper[4774]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Nov 21 14:28:15 crc kubenswrapper[4774]: > Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.640034 4774 kuberuntime_container.go:691] "PreStop hook failed" err=< Nov 21 14:28:15 crc kubenswrapper[4774]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Nov 21 14:28:15 crc kubenswrapper[4774]: + source /usr/local/bin/container-scripts/functions Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNBridge=br-int Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNRemote=tcp:localhost:6642 Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNEncapType=geneve Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNAvailabilityZones= Nov 21 14:28:15 crc kubenswrapper[4774]: ++ EnableChassisAsGateway=true Nov 21 14:28:15 crc kubenswrapper[4774]: ++ PhysicalNetworks= Nov 21 14:28:15 crc kubenswrapper[4774]: ++ OVNHostName= Nov 21 14:28:15 crc kubenswrapper[4774]: ++ DB_FILE=/etc/openvswitch/conf.db Nov 21 14:28:15 crc kubenswrapper[4774]: ++ ovs_dir=/var/lib/openvswitch Nov 21 14:28:15 crc kubenswrapper[4774]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Nov 21 14:28:15 crc kubenswrapper[4774]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Nov 21 14:28:15 crc kubenswrapper[4774]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + sleep 0.5 Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + sleep 0.5 Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + sleep 0.5 Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + sleep 0.5 Nov 21 14:28:15 crc kubenswrapper[4774]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Nov 21 14:28:15 crc kubenswrapper[4774]: + cleanup_ovsdb_server_semaphore Nov 21 14:28:15 crc kubenswrapper[4774]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Nov 21 14:28:15 crc kubenswrapper[4774]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Nov 21 14:28:15 crc kubenswrapper[4774]: > pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server" containerID="cri-o://08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.640075 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server" containerID="cri-o://08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" gracePeriod=28 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.667623 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="64e33a39-c371-477f-b1c9-d58189db4bc8" containerName="rabbitmq" containerID="cri-o://e113bb91e61fb20bd55da6f381dd07a86f741c04641af203c9cd800b9d16d231" gracePeriod=604800 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.703445 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/novacell03102-account-delete-pwxhc" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.718206 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="834a28d7bd2427951828771181afd8a938666b3888becfb912c78842574fb9ae" exitCode=0 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.718241 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="cd578a4be6466cb961f25126446b01ae08dfe77292401d7b6dc5269637ee2e33" exitCode=0 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.718253 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="1c4505cc2138852bc1de85d4a3368df20df9e8fc72c0b4f0a772d89a565a9d5c" exitCode=0 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.718263 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="51b9f59856cff7bf6c2e7c193206a014c3b1c1b6ff7e65f0ffe94ef9fbaf701e" exitCode=0 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.718272 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="4a079344642c9fb3a26394a82468d59daabece732a5466662ee8aeaa883a5bb3" exitCode=0 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.718285 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="d3b647fd9ca3744848c9bba9996b244e70638e808df6e12566f545983a15f3cc" exitCode=0 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.718265 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"834a28d7bd2427951828771181afd8a938666b3888becfb912c78842574fb9ae"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.720079 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"cd578a4be6466cb961f25126446b01ae08dfe77292401d7b6dc5269637ee2e33"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.720109 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"1c4505cc2138852bc1de85d4a3368df20df9e8fc72c0b4f0a772d89a565a9d5c"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.720126 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"51b9f59856cff7bf6c2e7c193206a014c3b1c1b6ff7e65f0ffe94ef9fbaf701e"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.720149 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"4a079344642c9fb3a26394a82468d59daabece732a5466662ee8aeaa883a5bb3"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.720192 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"d3b647fd9ca3744848c9bba9996b244e70638e808df6e12566f545983a15f3cc"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.752923 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"29fd4802-19c7-4e11-b776-c505c03206b0","Type":"ContainerDied","Data":"66249ecbe2c3348c6acd48e9804c896c943d2119544945a4641b3cd22603525d"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.752944 4774 generic.go:334] "Generic (PLEG): container finished" podID="29fd4802-19c7-4e11-b776-c505c03206b0" containerID="66249ecbe2c3348c6acd48e9804c896c943d2119544945a4641b3cd22603525d" exitCode=143 Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.760997 4774 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.761248 4774 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.761284 4774 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-proxy-748c4cc85c-dkrhb: [secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.761730 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift podName:4dd9e6d7-d0b1-49f3-920a-34e434835bfa nodeName:}" failed. No retries permitted until 2025-11-21 14:28:16.761670231 +0000 UTC m=+1487.413869490 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift") pod "swift-proxy-748c4cc85c-dkrhb" (UID: "4dd9e6d7-d0b1-49f3-920a-34e434835bfa") : [secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.795862 4774 generic.go:334] "Generic (PLEG): container finished" podID="7bf981c0-8ff6-493c-a5fc-14610df3b362" containerID="400661145f174c0b9169da8eed8077bb5592eced771c3239659552c524ba7eb9" exitCode=143 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.796015 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7bf981c0-8ff6-493c-a5fc-14610df3b362","Type":"ContainerDied","Data":"400661145f174c0b9169da8eed8077bb5592eced771c3239659552c524ba7eb9"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.796276 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-sdw4x_4549a9b5-fb19-4dae-9fee-b03d5d49e95d/openstack-network-exporter/0.log" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.796344 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-sdw4x" Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.813677 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2 is running failed: container process not found" containerID="ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.815985 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2 is running failed: container process not found" containerID="ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.821570 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-2sxpw" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.822140 4774 generic.go:334] "Generic (PLEG): container finished" podID="f89a7785-0a49-4c28-a587-ec113d2f3635" containerID="1d66c74501f2c450d7d07143b74c86af7aad69e0eb9bc0e1631a18ed7cbe8937" exitCode=0 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.822201 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" event={"ID":"f89a7785-0a49-4c28-a587-ec113d2f3635","Type":"ContainerDied","Data":"1d66c74501f2c450d7d07143b74c86af7aad69e0eb9bc0e1631a18ed7cbe8937"} Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.822236 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2 is running failed: container process not found" containerID="ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2" cmd=["/usr/bin/pidof","ovsdb-server"] Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.822261 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2 is running failed: container process not found" probeType="Readiness" pod="openstack/ovsdbserver-nb-0" podUID="db7f3cb4-269e-443e-836e-caae1c2d122f" containerName="ovsdbserver-nb" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.891111 4774 generic.go:334] "Generic (PLEG): container finished" podID="204761da-3cd3-4024-8268-2c4ade77be70" containerID="e1bece3865eab576d0537b2757e1996d3eb563738ed1cbdb0bbc09abd23a1ae4" exitCode=143 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.891220 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8d9694746-ctlgk" event={"ID":"204761da-3cd3-4024-8268-2c4ade77be70","Type":"ContainerDied","Data":"e1bece3865eab576d0537b2757e1996d3eb563738ed1cbdb0bbc09abd23a1ae4"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.891286 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-cell1-galera-0" podUID="8b17b723-7e23-4a12-916e-0f2d00b72239" containerName="galera" containerID="cri-o://aaa15f882e1fd7018199c22c68333ff550fad91d1c6a777a6876fe84c7fc858b" gracePeriod=30 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.896956 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0563658a-f1e8-4cae-b165-9697c4673895/ovsdbserver-sb/0.log" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.896995 4774 generic.go:334] "Generic (PLEG): container finished" podID="0563658a-f1e8-4cae-b165-9697c4673895" containerID="2c34ab166ca72aebd1fd6aa1a5cc31cb68ddf856a803c3f00aa0f1b318e937ff" exitCode=2 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.897009 4774 generic.go:334] "Generic (PLEG): container finished" podID="0563658a-f1e8-4cae-b165-9697c4673895" containerID="6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1" exitCode=143 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.897048 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0563658a-f1e8-4cae-b165-9697c4673895","Type":"ContainerDied","Data":"2c34ab166ca72aebd1fd6aa1a5cc31cb68ddf856a803c3f00aa0f1b318e937ff"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.897071 4774 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0563658a-f1e8-4cae-b165-9697c4673895","Type":"ContainerDied","Data":"6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1"} Nov 21 14:28:15 crc kubenswrapper[4774]: E1121 14:28:15.902543 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cde8d60_bdf9_405f_8991_5c1f55b0ee76.slice/crio-conmon-2dce44da6f6202c7964d5937a707b66b8c0555f55b1d955191986f35ef80726a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1718aee5_94ce_4682_aa62_28843ff1e2ef.slice/crio-95fb4afd0377bbc0f0df432194dc5a7490303bfd572fe17af159b5bcfaffa8dd.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf89a7785_0a49_4c28_a587_ec113d2f3635.slice/crio-conmon-1d66c74501f2c450d7d07143b74c86af7aad69e0eb9bc0e1631a18ed7cbe8937.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cde8d60_bdf9_405f_8991_5c1f55b0ee76.slice/crio-2dce44da6f6202c7964d5937a707b66b8c0555f55b1d955191986f35ef80726a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb7f3cb4_269e_443e_836e_caae1c2d122f.slice/crio-conmon-ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb7f3cb4_269e_443e_836e_caae1c2d122f.slice/crio-ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cde8d60_bdf9_405f_8991_5c1f55b0ee76.slice/crio-conmon-9bc27234572696e44f557a383a86c888ed805788bafa91dd14bb78cdefab3b32.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cde8d60_bdf9_405f_8991_5c1f55b0ee76.slice/crio-4a13a7da01eb78f1caaf1ffc112b4e611dc9d20280166283d224a8d79da6a2d5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ae7ffea_af5d_4804_84cf_fa3c5edfbd27.slice/crio-conmon-1c051875890cc87d20e2ccc60014cd64e0c54c66081134927ca3c7218c65fef2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cde8d60_bdf9_405f_8991_5c1f55b0ee76.slice/crio-conmon-89801dafc1d6b54a7d5db86bdd9ef9aa021a679876daee4fe43e50ca59175ae5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod124a9a6f_df08_4085_96d6_0a72f2bb2855.slice/crio-conmon-08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0563658a_f1e8_4cae_b165_9697c4673895.slice/crio-conmon-6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cde8d60_bdf9_405f_8991_5c1f55b0ee76.slice/crio-conmon-4a13a7da01eb78f1caaf1ffc112b4e611dc9d20280166283d224a8d79da6a2d5.scope\": RecentStats: unable to find 
data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod612a4642_7af7_4d93_a27f_e63a0593a511.slice/crio-conmon-21b0a39b83253fe42307631f2014556d041f1359f887388fb2f8c11c1f9d769b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0563658a_f1e8_4cae_b165_9697c4673895.slice/crio-6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6cde8d60_bdf9_405f_8991_5c1f55b0ee76.slice/crio-89801dafc1d6b54a7d5db86bdd9ef9aa021a679876daee4fe43e50ca59175ae5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf89a7785_0a49_4c28_a587_ec113d2f3635.slice/crio-1d66c74501f2c450d7d07143b74c86af7aad69e0eb9bc0e1631a18ed7cbe8937.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7a5f9e1_9167_418e_8e1e_57e645d31785.slice/crio-conmon-1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1718aee5_94ce_4682_aa62_28843ff1e2ef.slice/crio-conmon-95fb4afd0377bbc0f0df432194dc5a7490303bfd572fe17af159b5bcfaffa8dd.scope\": RecentStats: unable to find data in memory cache]" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.903661 4774 generic.go:334] "Generic (PLEG): container finished" podID="66b2a565-b48d-4b6f-8527-27326c13b522" containerID="dfbedbce9f436ba2d67af7321bdcc2d58c19cd6c6808e9ddda88da0a09afd9af" exitCode=137 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.925617 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_db7f3cb4-269e-443e-836e-caae1c2d122f/ovsdbserver-nb/0.log" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.925667 4774 generic.go:334] "Generic (PLEG): container finished" podID="db7f3cb4-269e-443e-836e-caae1c2d122f" containerID="f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79" exitCode=2 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.925729 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"db7f3cb4-269e-443e-836e-caae1c2d122f","Type":"ContainerDied","Data":"f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.945409 4774 generic.go:334] "Generic (PLEG): container finished" podID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerID="95fb4afd0377bbc0f0df432194dc5a7490303bfd572fe17af159b5bcfaffa8dd" exitCode=143 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.945838 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-576b48cd9b-wr2q7" event={"ID":"1718aee5-94ce-4682-aa62-28843ff1e2ef","Type":"ContainerDied","Data":"95fb4afd0377bbc0f0df432194dc5a7490303bfd572fe17af159b5bcfaffa8dd"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.967423 4774 generic.go:334] "Generic (PLEG): container finished" podID="36597581-6c3f-42a7-98ba-155d3bb19320" containerID="cf5874a27369c23eb02a38e945173e313d37d5eea273de5528c6e3f4c20042c1" exitCode=143 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.967523 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"36597581-6c3f-42a7-98ba-155d3bb19320","Type":"ContainerDied","Data":"cf5874a27369c23eb02a38e945173e313d37d5eea273de5528c6e3f4c20042c1"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.970556 4774 generic.go:334] "Generic (PLEG): container finished" podID="98c89c8e-6557-46b4-adf8-f954dfff68b3" containerID="571c60de19b673d0a2cf6499c4d0ad765e15a4171ae78a3dcb8552bb3605e8e6" exitCode=143 Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.970593 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-74459fb479-fkm77" event={"ID":"98c89c8e-6557-46b4-adf8-f954dfff68b3","Type":"ContainerDied","Data":"571c60de19b673d0a2cf6499c4d0ad765e15a4171ae78a3dcb8552bb3605e8e6"} Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979196 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ee04f12-987f-4f31-81b3-10cd067af310-ovn-controller-tls-certs\") pod \"7ee04f12-987f-4f31-81b3-10cd067af310\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979353 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-run-ovn\") pod \"7ee04f12-987f-4f31-81b3-10cd067af310\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979393 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-log-ovn\") pod \"7ee04f12-987f-4f31-81b3-10cd067af310\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979491 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klnwd\" (UniqueName: \"kubernetes.io/projected/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-kube-api-access-klnwd\") pod \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979540 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gk9c\" (UniqueName: \"kubernetes.io/projected/7ee04f12-987f-4f31-81b3-10cd067af310-kube-api-access-5gk9c\") pod \"7ee04f12-987f-4f31-81b3-10cd067af310\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979562 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-metrics-certs-tls-certs\") pod \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979628 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-combined-ca-bundle\") pod \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979659 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ee04f12-987f-4f31-81b3-10cd067af310-scripts\") pod \"7ee04f12-987f-4f31-81b3-10cd067af310\" (UID: 
\"7ee04f12-987f-4f31-81b3-10cd067af310\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979681 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-config\") pod \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979723 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-ovs-rundir\") pod \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979778 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-run\") pod \"7ee04f12-987f-4f31-81b3-10cd067af310\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979792 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-ovn-rundir\") pod \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\" (UID: \"4549a9b5-fb19-4dae-9fee-b03d5d49e95d\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.979839 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ee04f12-987f-4f31-81b3-10cd067af310-combined-ca-bundle\") pod \"7ee04f12-987f-4f31-81b3-10cd067af310\" (UID: \"7ee04f12-987f-4f31-81b3-10cd067af310\") " Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.985011 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-run" (OuterVolumeSpecName: "var-run") pod "7ee04f12-987f-4f31-81b3-10cd067af310" (UID: "7ee04f12-987f-4f31-81b3-10cd067af310"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.985111 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "4549a9b5-fb19-4dae-9fee-b03d5d49e95d" (UID: "4549a9b5-fb19-4dae-9fee-b03d5d49e95d"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.985136 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "7ee04f12-987f-4f31-81b3-10cd067af310" (UID: "7ee04f12-987f-4f31-81b3-10cd067af310"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.985802 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-config" (OuterVolumeSpecName: "config") pod "4549a9b5-fb19-4dae-9fee-b03d5d49e95d" (UID: "4549a9b5-fb19-4dae-9fee-b03d5d49e95d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.986054 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "4549a9b5-fb19-4dae-9fee-b03d5d49e95d" (UID: "4549a9b5-fb19-4dae-9fee-b03d5d49e95d"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.986116 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "7ee04f12-987f-4f31-81b3-10cd067af310" (UID: "7ee04f12-987f-4f31-81b3-10cd067af310"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.987257 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ee04f12-987f-4f31-81b3-10cd067af310-scripts" (OuterVolumeSpecName: "scripts") pod "7ee04f12-987f-4f31-81b3-10cd067af310" (UID: "7ee04f12-987f-4f31-81b3-10cd067af310"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:15 crc kubenswrapper[4774]: I1121 14:28:15.998485 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ee04f12-987f-4f31-81b3-10cd067af310-kube-api-access-5gk9c" (OuterVolumeSpecName: "kube-api-access-5gk9c") pod "7ee04f12-987f-4f31-81b3-10cd067af310" (UID: "7ee04f12-987f-4f31-81b3-10cd067af310"). InnerVolumeSpecName "kube-api-access-5gk9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.021095 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-kube-api-access-klnwd" (OuterVolumeSpecName: "kube-api-access-klnwd") pod "4549a9b5-fb19-4dae-9fee-b03d5d49e95d" (UID: "4549a9b5-fb19-4dae-9fee-b03d5d49e95d"). InnerVolumeSpecName "kube-api-access-klnwd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.026676 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ee04f12-987f-4f31-81b3-10cd067af310-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ee04f12-987f-4f31-81b3-10cd067af310" (UID: "7ee04f12-987f-4f31-81b3-10cd067af310"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.053114 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4549a9b5-fb19-4dae-9fee-b03d5d49e95d" (UID: "4549a9b5-fb19-4dae-9fee-b03d5d49e95d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.086130 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klnwd\" (UniqueName: \"kubernetes.io/projected/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-kube-api-access-klnwd\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.086652 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gk9c\" (UniqueName: \"kubernetes.io/projected/7ee04f12-987f-4f31-81b3-10cd067af310-kube-api-access-5gk9c\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.086665 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.086675 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7ee04f12-987f-4f31-81b3-10cd067af310-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.086705 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.086718 4774 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-ovs-rundir\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.086731 4774 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-ovn-rundir\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.086741 4774 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-run\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.086750 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ee04f12-987f-4f31-81b3-10cd067af310-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.086759 4774 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.086785 4774 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7ee04f12-987f-4f31-81b3-10cd067af310-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.087229 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder5822-account-delete-7mbkj"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.110085 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7751c2da-9178-46c8-bd67-32bd9977eae4" path="/var/lib/kubelet/pods/7751c2da-9178-46c8-bd67-32bd9977eae4/volumes" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.111380 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3" path="/var/lib/kubelet/pods/7c61c77d-62d8-42af-b5ab-ddd92c4f3cf3/volumes" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.111975 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d14df5d7-bd15-499d-b228-e5b60f9f53fb" path="/var/lib/kubelet/pods/d14df5d7-bd15-499d-b228-e5b60f9f53fb/volumes" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.112472 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d99ed036-dfda-4b15-88b9-cdfed30626b5" path="/var/lib/kubelet/pods/d99ed036-dfda-4b15-88b9-cdfed30626b5/volumes" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.113778 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f30db86a-e756-4f9a-9691-1642d9678687" path="/var/lib/kubelet/pods/f30db86a-e756-4f9a-9691-1642d9678687/volumes" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.115340 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4cff911-3af8-45f1-b86a-d3629217b328" path="/var/lib/kubelet/pods/f4cff911-3af8-45f1-b86a-d3629217b328/volumes" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.120564 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ee04f12-987f-4f31-81b3-10cd067af310-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "7ee04f12-987f-4f31-81b3-10cd067af310" (UID: "7ee04f12-987f-4f31-81b3-10cd067af310"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.193441 4774 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ee04f12-987f-4f31-81b3-10cd067af310-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.195899 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4zrq5"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.218067 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.218409 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="81a92903-9f60-4f44-917f-744a2b80a57c" containerName="nova-cell1-conductor-conductor" containerID="cri-o://464ba226111b0f9cd638d2e2cf2340bbb1479d0450894949cd4f99913bbc9678" gracePeriod=30 Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.288771 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-4zrq5"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.297578 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.298190 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="531a66a5-f4c9-44f1-83a7-a3e4292fef52" containerName="nova-cell0-conductor-conductor" containerID="cri-o://99052893755b66df9cfd4b4ab5f26b2fc638a33462e504620e030ca232e1aded" gracePeriod=30 Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.303105 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-9vfmw"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.309578 4774 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/nova-cell0-conductor-db-sync-9vfmw"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.371724 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "4549a9b5-fb19-4dae-9fee-b03d5d49e95d" (UID: "4549a9b5-fb19-4dae-9fee-b03d5d49e95d"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.372225 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance128f-account-delete-w4g2r"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.402042 4774 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4549a9b5-fb19-4dae-9fee-b03d5d49e95d-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: E1121 14:28:16.402150 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 21 14:28:16 crc kubenswrapper[4774]: E1121 14:28:16.402212 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data podName:e2685b76-2150-4209-a55b-a989ae40b7db nodeName:}" failed. No retries permitted until 2025-11-21 14:28:18.402191177 +0000 UTC m=+1489.054390436 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data") pod "rabbitmq-server-0" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db") : configmap "rabbitmq-config-data" not found Nov 21 14:28:16 crc kubenswrapper[4774]: W1121 14:28:16.407954 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a3ae90b_73bb_4fbf_887b_c6e432338502.slice/crio-7e25a5af88034a826c29c8882746e3e52535762b02564e39400aa794644d46f9 WatchSource:0}: Error finding container 7e25a5af88034a826c29c8882746e3e52535762b02564e39400aa794644d46f9: Status 404 returned error can't find the container with id 7e25a5af88034a826c29c8882746e3e52535762b02564e39400aa794644d46f9 Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.548669 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0563658a-f1e8-4cae-b165-9697c4673895/ovsdbserver-sb/0.log" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.549217 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.562605 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.586946 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement2802-account-delete-ltg2g"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.647716 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-748c4cc85c-dkrhb"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.648049 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-748c4cc85c-dkrhb" podUID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerName="proxy-httpd" containerID="cri-o://969fbf4f7d7b2be68e80f13bc613bae47954c1d9cf0870455b84d0a4bc6e18ef" gracePeriod=30 Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.648250 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/swift-proxy-748c4cc85c-dkrhb" podUID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerName="proxy-server" containerID="cri-o://8dd78826a36d2a0c84007fefea4af9b39c62687e997e7371678a93eeba3ce1aa" gracePeriod=30 Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.662724 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican6523-account-delete-t2fnw"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.682647 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-cell1-novncproxy-0" podUID="e0f5811f-60f6-4820-b981-715448365e52" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.0.196:6080/vnc_lite.html\": dial tcp 10.217.0.196:6080: connect: connection refused" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.714806 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-ovsdbserver-sb\") pod \"f89a7785-0a49-4c28-a587-ec113d2f3635\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.714881 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0563658a-f1e8-4cae-b165-9697c4673895-ovsdb-rundir\") pod \"0563658a-f1e8-4cae-b165-9697c4673895\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.714912 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-config\") pod \"f89a7785-0a49-4c28-a587-ec113d2f3635\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.714938 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0563658a-f1e8-4cae-b165-9697c4673895-scripts\") pod \"0563658a-f1e8-4cae-b165-9697c4673895\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.714973 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-metrics-certs-tls-certs\") pod \"0563658a-f1e8-4cae-b165-9697c4673895\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.714997 4774 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-dns-swift-storage-0\") pod \"f89a7785-0a49-4c28-a587-ec113d2f3635\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.715048 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-dns-svc\") pod \"f89a7785-0a49-4c28-a587-ec113d2f3635\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.715639 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0563658a-f1e8-4cae-b165-9697c4673895-config\") pod \"0563658a-f1e8-4cae-b165-9697c4673895\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.715715 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-ovsdbserver-sb-tls-certs\") pod \"0563658a-f1e8-4cae-b165-9697c4673895\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.715793 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwffp\" (UniqueName: \"kubernetes.io/projected/0563658a-f1e8-4cae-b165-9697c4673895-kube-api-access-lwffp\") pod \"0563658a-f1e8-4cae-b165-9697c4673895\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.715832 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-ovsdbserver-nb\") pod \"f89a7785-0a49-4c28-a587-ec113d2f3635\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.715871 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zblm\" (UniqueName: \"kubernetes.io/projected/f89a7785-0a49-4c28-a587-ec113d2f3635-kube-api-access-5zblm\") pod \"f89a7785-0a49-4c28-a587-ec113d2f3635\" (UID: \"f89a7785-0a49-4c28-a587-ec113d2f3635\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.715926 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-combined-ca-bundle\") pod \"0563658a-f1e8-4cae-b165-9697c4673895\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.716045 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"0563658a-f1e8-4cae-b165-9697c4673895\" (UID: \"0563658a-f1e8-4cae-b165-9697c4673895\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.722543 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0563658a-f1e8-4cae-b165-9697c4673895-config" (OuterVolumeSpecName: "config") pod "0563658a-f1e8-4cae-b165-9697c4673895" (UID: "0563658a-f1e8-4cae-b165-9697c4673895"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.735606 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0563658a-f1e8-4cae-b165-9697c4673895-scripts" (OuterVolumeSpecName: "scripts") pod "0563658a-f1e8-4cae-b165-9697c4673895" (UID: "0563658a-f1e8-4cae-b165-9697c4673895"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.737722 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0563658a-f1e8-4cae-b165-9697c4673895-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "0563658a-f1e8-4cae-b165-9697c4673895" (UID: "0563658a-f1e8-4cae-b165-9697c4673895"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.770423 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-748c4cc85c-dkrhb" podUID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerName="proxy-server" probeResult="failure" output="Get \"https://10.217.0.165:8080/healthcheck\": read tcp 10.217.0.2:48758->10.217.0.165:8080: read: connection reset by peer" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.770876 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-748c4cc85c-dkrhb" podUID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.165:8080/healthcheck\": read tcp 10.217.0.2:48766->10.217.0.165:8080: read: connection reset by peer" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.772983 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_db7f3cb4-269e-443e-836e-caae1c2d122f/ovsdbserver-nb/0.log" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.773490 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.794676 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.796675 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novaapi242e-account-delete-tbszv"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.806254 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0563658a-f1e8-4cae-b165-9697c4673895-kube-api-access-lwffp" (OuterVolumeSpecName: "kube-api-access-lwffp") pod "0563658a-f1e8-4cae-b165-9697c4673895" (UID: "0563658a-f1e8-4cae-b165-9697c4673895"). InnerVolumeSpecName "kube-api-access-lwffp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: E1121 14:28:16.823372 4774 projected.go:263] Couldn't get secret openstack/swift-proxy-config-data: secret "swift-proxy-config-data" not found Nov 21 14:28:16 crc kubenswrapper[4774]: E1121 14:28:16.824080 4774 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found Nov 21 14:28:16 crc kubenswrapper[4774]: E1121 14:28:16.824093 4774 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Nov 21 14:28:16 crc kubenswrapper[4774]: E1121 14:28:16.824106 4774 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-proxy-748c4cc85c-dkrhb: [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 21 14:28:16 crc kubenswrapper[4774]: E1121 14:28:16.824178 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift podName:4dd9e6d7-d0b1-49f3-920a-34e434835bfa nodeName:}" failed. No retries permitted until 2025-11-21 14:28:18.824154861 +0000 UTC m=+1489.476354120 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift") pod "swift-proxy-748c4cc85c-dkrhb" (UID: "4dd9e6d7-d0b1-49f3-920a-34e434835bfa") : [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.824474 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f89a7785-0a49-4c28-a587-ec113d2f3635-kube-api-access-5zblm" (OuterVolumeSpecName: "kube-api-access-5zblm") pod "f89a7785-0a49-4c28-a587-ec113d2f3635" (UID: "f89a7785-0a49-4c28-a587-ec113d2f3635"). InnerVolumeSpecName "kube-api-access-5zblm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.824975 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0563658a-f1e8-4cae-b165-9697c4673895-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.825067 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0563658a-f1e8-4cae-b165-9697c4673895-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.825085 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwffp\" (UniqueName: \"kubernetes.io/projected/0563658a-f1e8-4cae-b165-9697c4673895-kube-api-access-lwffp\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.825235 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zblm\" (UniqueName: \"kubernetes.io/projected/f89a7785-0a49-4c28-a587-ec113d2f3635-kube-api-access-5zblm\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.825264 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0563658a-f1e8-4cae-b165-9697c4673895-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.828007 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "0563658a-f1e8-4cae-b165-9697c4673895" (UID: "0563658a-f1e8-4cae-b165-9697c4673895"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: W1121 14:28:16.908311 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod58b0c1b5_ec3b_4e7f_a2eb_78eacc9bc8ad.slice/crio-135851e48970ebaf5a4181ed44a5d612b81aa0f247e630e0d4aefafd44482dac WatchSource:0}: Error finding container 135851e48970ebaf5a4181ed44a5d612b81aa0f247e630e0d4aefafd44482dac: Status 404 returned error can't find the container with id 135851e48970ebaf5a4181ed44a5d612b81aa0f247e630e0d4aefafd44482dac Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.925709 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-metrics-certs-tls-certs\") pod \"db7f3cb4-269e-443e-836e-caae1c2d122f\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.929897 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db7f3cb4-269e-443e-836e-caae1c2d122f-config\") pod \"db7f3cb4-269e-443e-836e-caae1c2d122f\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.930029 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b2a565-b48d-4b6f-8527-27326c13b522-combined-ca-bundle\") pod \"66b2a565-b48d-4b6f-8527-27326c13b522\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.930207 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"kube-api-access-8wg79\" (UniqueName: \"kubernetes.io/projected/db7f3cb4-269e-443e-836e-caae1c2d122f-kube-api-access-8wg79\") pod \"db7f3cb4-269e-443e-836e-caae1c2d122f\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.930335 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zmc9\" (UniqueName: \"kubernetes.io/projected/66b2a565-b48d-4b6f-8527-27326c13b522-kube-api-access-6zmc9\") pod \"66b2a565-b48d-4b6f-8527-27326c13b522\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.930468 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-ovsdbserver-nb-tls-certs\") pod \"db7f3cb4-269e-443e-836e-caae1c2d122f\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.930564 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db7f3cb4-269e-443e-836e-caae1c2d122f-scripts\") pod \"db7f3cb4-269e-443e-836e-caae1c2d122f\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.930665 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-combined-ca-bundle\") pod \"db7f3cb4-269e-443e-836e-caae1c2d122f\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.930895 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/66b2a565-b48d-4b6f-8527-27326c13b522-openstack-config-secret\") pod \"66b2a565-b48d-4b6f-8527-27326c13b522\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.931006 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/66b2a565-b48d-4b6f-8527-27326c13b522-openstack-config\") pod \"66b2a565-b48d-4b6f-8527-27326c13b522\" (UID: \"66b2a565-b48d-4b6f-8527-27326c13b522\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.931566 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/db7f3cb4-269e-443e-836e-caae1c2d122f-ovsdb-rundir\") pod \"db7f3cb4-269e-443e-836e-caae1c2d122f\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.931667 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"db7f3cb4-269e-443e-836e-caae1c2d122f\" (UID: \"db7f3cb4-269e-443e-836e-caae1c2d122f\") " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.938571 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db7f3cb4-269e-443e-836e-caae1c2d122f-config" (OuterVolumeSpecName: "config") pod "db7f3cb4-269e-443e-836e-caae1c2d122f" (UID: "db7f3cb4-269e-443e-836e-caae1c2d122f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.939227 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db7f3cb4-269e-443e-836e-caae1c2d122f-scripts" (OuterVolumeSpecName: "scripts") pod "db7f3cb4-269e-443e-836e-caae1c2d122f" (UID: "db7f3cb4-269e-443e-836e-caae1c2d122f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.940169 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db7f3cb4-269e-443e-836e-caae1c2d122f-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.941046 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.941266 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db7f3cb4-269e-443e-836e-caae1c2d122f-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.940555 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db7f3cb4-269e-443e-836e-caae1c2d122f-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "db7f3cb4-269e-443e-836e-caae1c2d122f" (UID: "db7f3cb4-269e-443e-836e-caae1c2d122f"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.942140 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66b2a565-b48d-4b6f-8527-27326c13b522-kube-api-access-6zmc9" (OuterVolumeSpecName: "kube-api-access-6zmc9") pod "66b2a565-b48d-4b6f-8527-27326c13b522" (UID: "66b2a565-b48d-4b6f-8527-27326c13b522"). InnerVolumeSpecName "kube-api-access-6zmc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.948437 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutronfb24-account-delete-z2nw8"] Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.951060 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "db7f3cb4-269e-443e-836e-caae1c2d122f" (UID: "db7f3cb4-269e-443e-836e-caae1c2d122f"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 21 14:28:16 crc kubenswrapper[4774]: I1121 14:28:16.952986 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0563658a-f1e8-4cae-b165-9697c4673895" (UID: "0563658a-f1e8-4cae-b165-9697c4673895"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.006841 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db7f3cb4-269e-443e-836e-caae1c2d122f-kube-api-access-8wg79" (OuterVolumeSpecName: "kube-api-access-8wg79") pod "db7f3cb4-269e-443e-836e-caae1c2d122f" (UID: "db7f3cb4-269e-443e-836e-caae1c2d122f"). 
InnerVolumeSpecName "kube-api-access-8wg79". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.036896 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="89801dafc1d6b54a7d5db86bdd9ef9aa021a679876daee4fe43e50ca59175ae5" exitCode=0 Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.037289 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="2dce44da6f6202c7964d5937a707b66b8c0555f55b1d955191986f35ef80726a" exitCode=0 Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.037399 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="4a13a7da01eb78f1caaf1ffc112b4e611dc9d20280166283d224a8d79da6a2d5" exitCode=0 Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.037484 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="53a824add7ac0cc57042d70c06b911c7e6a34e1c2010603ee4d6fbc3ed438924" exitCode=0 Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.037550 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="b0ff7c749c18817ad064c15649c712f4e89466819f6dd77e940ca84ed95e90a8" exitCode=0 Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.037606 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="b02da81747033bd11ee8ad86892e420553f3c4e14b394a17b83ad199bf283c8e" exitCode=0 Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.037703 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="78cf57423bdba8a0adb0930011b88d9283fb739e1b67a73287f7ff3ca582a4a1" exitCode=0 Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.037760 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="9bc27234572696e44f557a383a86c888ed805788bafa91dd14bb78cdefab3b32" exitCode=0 Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.036970 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"89801dafc1d6b54a7d5db86bdd9ef9aa021a679876daee4fe43e50ca59175ae5"} Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.038015 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"2dce44da6f6202c7964d5937a707b66b8c0555f55b1d955191986f35ef80726a"} Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.038037 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"4a13a7da01eb78f1caaf1ffc112b4e611dc9d20280166283d224a8d79da6a2d5"} Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.038049 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"53a824add7ac0cc57042d70c06b911c7e6a34e1c2010603ee4d6fbc3ed438924"} Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.038062 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"b0ff7c749c18817ad064c15649c712f4e89466819f6dd77e940ca84ed95e90a8"} Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.038074 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"b02da81747033bd11ee8ad86892e420553f3c4e14b394a17b83ad199bf283c8e"} Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.038085 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"78cf57423bdba8a0adb0930011b88d9283fb739e1b67a73287f7ff3ca582a4a1"} Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.038095 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"9bc27234572696e44f557a383a86c888ed805788bafa91dd14bb78cdefab3b32"} Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.050965 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/db7f3cb4-269e-443e-836e-caae1c2d122f-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.051009 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.051020 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.051030 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wg79\" (UniqueName: \"kubernetes.io/projected/db7f3cb4-269e-443e-836e-caae1c2d122f-kube-api-access-8wg79\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.051040 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zmc9\" (UniqueName: \"kubernetes.io/projected/66b2a565-b48d-4b6f-8527-27326c13b522-kube-api-access-6zmc9\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.065631 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27","Type":"ContainerDied","Data":"1c051875890cc87d20e2ccc60014cd64e0c54c66081134927ca3c7218c65fef2"} Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.073476 4774 generic.go:334] "Generic (PLEG): container finished" podID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerID="1c051875890cc87d20e2ccc60014cd64e0c54c66081134927ca3c7218c65fef2" exitCode=143 Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.099159 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-sdw4x_4549a9b5-fb19-4dae-9fee-b03d5d49e95d/openstack-network-exporter/0.log" Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.100468 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-sdw4x"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.101593 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-sdw4x" event={"ID":"4549a9b5-fb19-4dae-9fee-b03d5d49e95d","Type":"ContainerDied","Data":"eb45778d8c0a689a4260538abba14618f2fb4a419618f25ac6084c13abfb6151"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.101718 4774 scope.go:117] "RemoveContainer" containerID="ae41437905bab4b8a42a6e934b47544bc731aad356664fbc208508fb4483c6af"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.169214 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance128f-account-delete-w4g2r" event={"ID":"7a3ae90b-73bb-4fbf-887b-c6e432338502","Type":"ContainerStarted","Data":"7e25a5af88034a826c29c8882746e3e52535762b02564e39400aa794644d46f9"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.188764 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.207254 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-metrics-sdw4x"]
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.217674 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_0563658a-f1e8-4cae-b165-9697c4673895/ovsdbserver-sb/0.log"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.218348 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.218557 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"0563658a-f1e8-4cae-b165-9697c4673895","Type":"ContainerDied","Data":"34351f6a1d2eb3c687fdd7ca72f122de3499b79351c524d83e90ebcec1c1143a"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.218786 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-metrics-sdw4x"]
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.240408 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6868d89965-nrgpl" event={"ID":"f89a7785-0a49-4c28-a587-ec113d2f3635","Type":"ContainerDied","Data":"9039f1b79fd3bb9f3fd9c2cd2365af0009ab0f999d41748d7258c8e87371def6"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.240551 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6868d89965-nrgpl"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.253721 4774 generic.go:334] "Generic (PLEG): container finished" podID="8057ad05-b8c9-4742-a0e2-388f0a901595" containerID="1bed157d3f1b09ec22281912c29b9fe8e5b372b41ebbf607b1b08a4791141c7e" exitCode=143
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.253923 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" event={"ID":"8057ad05-b8c9-4742-a0e2-388f0a901595","Type":"ContainerDied","Data":"1bed157d3f1b09ec22281912c29b9fe8e5b372b41ebbf607b1b08a4791141c7e"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.272691 4774 generic.go:334] "Generic (PLEG): container finished" podID="d7a5f9e1-9167-418e-8e1e-57e645d31785" containerID="1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28" exitCode=0
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.272958 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5546774f69-cpnh7" event={"ID":"d7a5f9e1-9167-418e-8e1e-57e645d31785","Type":"ContainerDied","Data":"1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.282714 4774 generic.go:334] "Generic (PLEG): container finished" podID="8b17b723-7e23-4a12-916e-0f2d00b72239" containerID="aaa15f882e1fd7018199c22c68333ff550fad91d1c6a777a6876fe84c7fc858b" exitCode=0
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.283615 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8b17b723-7e23-4a12-916e-0f2d00b72239","Type":"ContainerDied","Data":"aaa15f882e1fd7018199c22c68333ff550fad91d1c6a777a6876fe84c7fc858b"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.289057 4774 generic.go:334] "Generic (PLEG): container finished" podID="612a4642-7af7-4d93-a27f-e63a0593a511" containerID="21b0a39b83253fe42307631f2014556d041f1359f887388fb2f8c11c1f9d769b" exitCode=143
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.289234 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"612a4642-7af7-4d93-a27f-e63a0593a511","Type":"ContainerDied","Data":"21b0a39b83253fe42307631f2014556d041f1359f887388fb2f8c11c1f9d769b"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.291618 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement2802-account-delete-ltg2g" event={"ID":"2bf5bbb4-9ebb-41b9-a888-4144660d088c","Type":"ContainerStarted","Data":"489020b01ad371ae7a8be2e9485117135c9a8683bcad8d1f98ee44c6bb852fca"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.294252 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.299141 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6523-account-delete-t2fnw" event={"ID":"79476096-5d34-4e8a-9f33-3127bacf4e60","Type":"ContainerStarted","Data":"ef6c0cf23a2d392647552b21088565764cbcc3e64b0777984f7a131de5f28567"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.304390 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_db7f3cb4-269e-443e-836e-caae1c2d122f/ovsdbserver-nb/0.log"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.304463 4774 generic.go:334] "Generic (PLEG): container finished" podID="db7f3cb4-269e-443e-836e-caae1c2d122f" containerID="ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2" exitCode=143
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.304568 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"db7f3cb4-269e-443e-836e-caae1c2d122f","Type":"ContainerDied","Data":"ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.304618 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"db7f3cb4-269e-443e-836e-caae1c2d122f","Type":"ContainerDied","Data":"3c0e65da300bee1fbf43896f5ef986ca282acfedd88e9aeaf9dd8a6a98064629"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.304740 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.309227 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder5822-account-delete-7mbkj" event={"ID":"f7a1181b-900b-40dc-9855-795653215df3","Type":"ContainerStarted","Data":"38469690fa07fe07f776fd52a55363dcb7d1936d82c2fd1d99897be0174b2185"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.309322 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder5822-account-delete-7mbkj" event={"ID":"f7a1181b-900b-40dc-9855-795653215df3","Type":"ContainerStarted","Data":"3ebbc2c15de6153027efa3d91f17c54d934ae0be150e065dda7f690059d8300e"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.316113 4774 generic.go:334] "Generic (PLEG): container finished" podID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" exitCode=0
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.316257 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ld98r" event={"ID":"124a9a6f-df08-4085-96d6-0a72f2bb2855","Type":"ContainerDied","Data":"08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.324289 4774 generic.go:334] "Generic (PLEG): container finished" podID="e0f5811f-60f6-4820-b981-715448365e52" containerID="36dd18acf6da72cf687d626808b4dda1668438a188a9f1018c121f0a0c64d299" exitCode=0
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.324374 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e0f5811f-60f6-4820-b981-715448365e52","Type":"ContainerDied","Data":"36dd18acf6da72cf687d626808b4dda1668438a188a9f1018c121f0a0c64d299"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.348622 4774 scope.go:117] "RemoveContainer" containerID="2c34ab166ca72aebd1fd6aa1a5cc31cb68ddf856a803c3f00aa0f1b318e937ff"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.348847 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder5822-account-delete-7mbkj" podStartSLOduration=4.3487885649999996 podStartE2EDuration="4.348788565s" podCreationTimestamp="2025-11-21 14:28:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:28:17.325611215 +0000 UTC m=+1487.977810464" watchObservedRunningTime="2025-11-21 14:28:17.348788565 +0000 UTC m=+1488.000987824"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.355665 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-2sxpw" event={"ID":"7ee04f12-987f-4f31-81b3-10cd067af310","Type":"ContainerDied","Data":"2cf529dd42f1272162146cff715406bf4613ff943c312180b80c25fd82785d9b"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.355795 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2sxpw"
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.385552 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aaa15f882e1fd7018199c22c68333ff550fad91d1c6a777a6876fe84c7fc858b is running failed: container process not found" containerID="aaa15f882e1fd7018199c22c68333ff550fad91d1c6a777a6876fe84c7fc858b" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"]
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.386130 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aaa15f882e1fd7018199c22c68333ff550fad91d1c6a777a6876fe84c7fc858b is running failed: container process not found" containerID="aaa15f882e1fd7018199c22c68333ff550fad91d1c6a777a6876fe84c7fc858b" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"]
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.386376 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.387037 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aaa15f882e1fd7018199c22c68333ff550fad91d1c6a777a6876fe84c7fc858b is running failed: container process not found" containerID="aaa15f882e1fd7018199c22c68333ff550fad91d1c6a777a6876fe84c7fc858b" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"]
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.387074 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of aaa15f882e1fd7018199c22c68333ff550fad91d1c6a777a6876fe84c7fc858b is running failed: container process not found" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="8b17b723-7e23-4a12-916e-0f2d00b72239" containerName="galera"
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.387096 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.387927 4774 generic.go:334] "Generic (PLEG): container finished" podID="75187d0f-77b0-45ee-a452-1850f0fe7851" containerID="f284772158aa9afb2ac683ea5db800eb76ca8ee198f42ac67c00afb5d059483f" exitCode=0
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.388003 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"75187d0f-77b0-45ee-a452-1850f0fe7851","Type":"ContainerDied","Data":"f284772158aa9afb2ac683ea5db800eb76ca8ee198f42ac67c00afb5d059483f"}
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.388056 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.388077 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server"
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.390598 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.390662 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data podName:64e33a39-c371-477f-b1c9-d58189db4bc8 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:21.390635846 +0000 UTC m=+1492.042835095 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data") pod "rabbitmq-cell1-server-0" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8") : configmap "rabbitmq-cell1-config-data" not found
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.396789 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f89a7785-0a49-4c28-a587-ec113d2f3635" (UID: "f89a7785-0a49-4c28-a587-ec113d2f3635"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.408402 4774 generic.go:334] "Generic (PLEG): container finished" podID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerID="969fbf4f7d7b2be68e80f13bc613bae47954c1d9cf0870455b84d0a4bc6e18ef" exitCode=0
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.408531 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-748c4cc85c-dkrhb" event={"ID":"4dd9e6d7-d0b1-49f3-920a-34e434835bfa","Type":"ContainerDied","Data":"969fbf4f7d7b2be68e80f13bc613bae47954c1d9cf0870455b84d0a4bc6e18ef"}
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.412539 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi242e-account-delete-tbszv" event={"ID":"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad","Type":"ContainerStarted","Data":"135851e48970ebaf5a4181ed44a5d612b81aa0f247e630e0d4aefafd44482dac"}
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.447804 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.454327 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.477914 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Nov 21 14:28:17 crc kubenswrapper[4774]: E1121 14:28:17.478019 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovs-vswitchd"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.504508 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.539374 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db7f3cb4-269e-443e-836e-caae1c2d122f" (UID: "db7f3cb4-269e-443e-836e-caae1c2d122f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.559863 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f89a7785-0a49-4c28-a587-ec113d2f3635" (UID: "f89a7785-0a49-4c28-a587-ec113d2f3635"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.607074 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-dns-svc\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.607510 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.627769 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/novacell03102-account-delete-pwxhc"]
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.630317 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.696660 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc"
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.709323 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.709355 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:17 crc kubenswrapper[4774]: W1121 14:28:17.722547 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3311b6bd_a19b_402c_afe4_22222098c669.slice/crio-8557e9b8b109d70798eeacd9ade5c0706639c8124bf7a205542645d43c181df1 WatchSource:0}: Error finding container 8557e9b8b109d70798eeacd9ade5c0706639c8124bf7a205542645d43c181df1: Status 404 returned error can't find the container with id 8557e9b8b109d70798eeacd9ade5c0706639c8124bf7a205542645d43c181df1
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.739721 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66b2a565-b48d-4b6f-8527-27326c13b522-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "66b2a565-b48d-4b6f-8527-27326c13b522" (UID: "66b2a565-b48d-4b6f-8527-27326c13b522"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.774116 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f89a7785-0a49-4c28-a587-ec113d2f3635" (UID: "f89a7785-0a49-4c28-a587-ec113d2f3635"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.794434 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66b2a565-b48d-4b6f-8527-27326c13b522-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66b2a565-b48d-4b6f-8527-27326c13b522" (UID: "66b2a565-b48d-4b6f-8527-27326c13b522"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.814188 4774 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.814231 4774 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/66b2a565-b48d-4b6f-8527-27326c13b522-openstack-config\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.814240 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66b2a565-b48d-4b6f-8527-27326c13b522-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.834305 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-config" (OuterVolumeSpecName: "config") pod "f89a7785-0a49-4c28-a587-ec113d2f3635" (UID: "f89a7785-0a49-4c28-a587-ec113d2f3635"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.835434 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f89a7785-0a49-4c28-a587-ec113d2f3635" (UID: "f89a7785-0a49-4c28-a587-ec113d2f3635"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.870516 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "0563658a-f1e8-4cae-b165-9697c4673895" (UID: "0563658a-f1e8-4cae-b165-9697c4673895"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.914188 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "db7f3cb4-269e-443e-836e-caae1c2d122f" (UID: "db7f3cb4-269e-443e-836e-caae1c2d122f"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.927866 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-config\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.927896 4774 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.927906 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.927915 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f89a7785-0a49-4c28-a587-ec113d2f3635-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:17 crc kubenswrapper[4774]: I1121 14:28:17.982429 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66b2a565-b48d-4b6f-8527-27326c13b522-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "66b2a565-b48d-4b6f-8527-27326c13b522" (UID: "66b2a565-b48d-4b6f-8527-27326c13b522"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.003605 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "db7f3cb4-269e-443e-836e-caae1c2d122f" (UID: "db7f3cb4-269e-443e-836e-caae1c2d122f"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.023892 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "0563658a-f1e8-4cae-b165-9697c4673895" (UID: "0563658a-f1e8-4cae-b165-9697c4673895"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.045460 4774 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/66b2a565-b48d-4b6f-8527-27326c13b522-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.045512 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0563658a-f1e8-4cae-b165-9697c4673895-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.045523 4774 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/db7f3cb4-269e-443e-836e-caae1c2d122f-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.142808 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f21c9fd-3364-4328-8717-c25f82fe8d02" path="/var/lib/kubelet/pods/3f21c9fd-3364-4328-8717-c25f82fe8d02/volumes"
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.143991 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4549a9b5-fb19-4dae-9fee-b03d5d49e95d" path="/var/lib/kubelet/pods/4549a9b5-fb19-4dae-9fee-b03d5d49e95d/volumes"
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.154331 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66b2a565-b48d-4b6f-8527-27326c13b522" path="/var/lib/kubelet/pods/66b2a565-b48d-4b6f-8527-27326c13b522/volumes"
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.155159 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b13000ea-9b2c-47fe-aa5f-3e1de9f83511" path="/var/lib/kubelet/pods/b13000ea-9b2c-47fe-aa5f-3e1de9f83511/volumes"
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.429233 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="7bf981c0-8ff6-493c-a5fc-14610df3b362" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.167:8776/healthcheck\": read tcp 10.217.0.2:39946->10.217.0.167:8776: read: connection reset by peer"
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.430929 4774 generic.go:334] "Generic (PLEG): container finished" podID="2bf5bbb4-9ebb-41b9-a888-4144660d088c" containerID="1e65322f1850f643444a1c4b348d8c5959eb5addc0d201ec1cb0390faccbede6" exitCode=0
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.430978 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement2802-account-delete-ltg2g" event={"ID":"2bf5bbb4-9ebb-41b9-a888-4144660d088c","Type":"ContainerDied","Data":"1e65322f1850f643444a1c4b348d8c5959eb5addc0d201ec1cb0390faccbede6"}
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.444085 4774 generic.go:334] "Generic (PLEG): container finished" podID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerID="8dd78826a36d2a0c84007fefea4af9b39c62687e997e7371678a93eeba3ce1aa" exitCode=0
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.444152 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-748c4cc85c-dkrhb" event={"ID":"4dd9e6d7-d0b1-49f3-920a-34e434835bfa","Type":"ContainerDied","Data":"8dd78826a36d2a0c84007fefea4af9b39c62687e997e7371678a93eeba3ce1aa"}
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.444181 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-748c4cc85c-dkrhb" event={"ID":"4dd9e6d7-d0b1-49f3-920a-34e434835bfa","Type":"ContainerDied","Data":"07fdc4d92fd456dd661517ac6b3f8e13a9e0883a4bda9fd068793831cd9ace5e"}
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.444191 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="07fdc4d92fd456dd661517ac6b3f8e13a9e0883a4bda9fd068793831cd9ace5e"
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.462313 4774 generic.go:334] "Generic (PLEG): container finished" podID="531a66a5-f4c9-44f1-83a7-a3e4292fef52" containerID="99052893755b66df9cfd4b4ab5f26b2fc638a33462e504620e030ca232e1aded" exitCode=0
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.462405 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"531a66a5-f4c9-44f1-83a7-a3e4292fef52","Type":"ContainerDied","Data":"99052893755b66df9cfd4b4ab5f26b2fc638a33462e504620e030ca232e1aded"}
Nov 21 14:28:18 crc kubenswrapper[4774]: E1121 14:28:18.468773 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Nov 21 14:28:18 crc kubenswrapper[4774]: E1121 14:28:18.469794 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data podName:e2685b76-2150-4209-a55b-a989ae40b7db nodeName:}" failed. No retries permitted until 2025-11-21 14:28:22.469773659 +0000 UTC m=+1493.121972928 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data") pod "rabbitmq-server-0" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db") : configmap "rabbitmq-config-data" not found
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.490203 4774 generic.go:334] "Generic (PLEG): container finished" podID="f7a1181b-900b-40dc-9855-795653215df3" containerID="38469690fa07fe07f776fd52a55363dcb7d1936d82c2fd1d99897be0174b2185" exitCode=0
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.490340 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder5822-account-delete-7mbkj" event={"ID":"f7a1181b-900b-40dc-9855-795653215df3","Type":"ContainerDied","Data":"38469690fa07fe07f776fd52a55363dcb7d1936d82c2fd1d99897be0174b2185"}
Nov 21 14:28:18 crc kubenswrapper[4774]: E1121 14:28:18.490424 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 99052893755b66df9cfd4b4ab5f26b2fc638a33462e504620e030ca232e1aded is running failed: container process not found" containerID="99052893755b66df9cfd4b4ab5f26b2fc638a33462e504620e030ca232e1aded" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 21 14:28:18 crc kubenswrapper[4774]: E1121 14:28:18.498138 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 99052893755b66df9cfd4b4ab5f26b2fc638a33462e504620e030ca232e1aded is running failed: container process not found" containerID="99052893755b66df9cfd4b4ab5f26b2fc638a33462e504620e030ca232e1aded" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 21 14:28:18 crc kubenswrapper[4774]: E1121 14:28:18.499680 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 99052893755b66df9cfd4b4ab5f26b2fc638a33462e504620e030ca232e1aded is running failed: container process not found" containerID="99052893755b66df9cfd4b4ab5f26b2fc638a33462e504620e030ca232e1aded" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 21 14:28:18 crc kubenswrapper[4774]: E1121 14:28:18.499722 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 99052893755b66df9cfd4b4ab5f26b2fc638a33462e504620e030ca232e1aded is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="531a66a5-f4c9-44f1-83a7-a3e4292fef52" containerName="nova-cell0-conductor-conductor"
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.500934 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronfb24-account-delete-z2nw8" event={"ID":"650c7a92-1469-4a9c-9a60-a846fe7ed823","Type":"ContainerStarted","Data":"4992742ddc6a9a496a1fb0493b0cc7978879fde993035329bf760388243535c7"}
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.505599 4774 generic.go:334] "Generic (PLEG): container finished" podID="75187d0f-77b0-45ee-a452-1850f0fe7851" containerID="cfabf58a3660a117c2e4bc8be1a895c8cb8999d2d918f92bec39bbee7161485e" exitCode=0
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.505691 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"75187d0f-77b0-45ee-a452-1850f0fe7851","Type":"ContainerDied","Data":"cfabf58a3660a117c2e4bc8be1a895c8cb8999d2d918f92bec39bbee7161485e"}
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.513024 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"8b17b723-7e23-4a12-916e-0f2d00b72239","Type":"ContainerDied","Data":"96e5350069d68cc9fb8fa727f566111d417236174d4f0857a9d8162adf07265d"}
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.513072 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96e5350069d68cc9fb8fa727f566111d417236174d4f0857a9d8162adf07265d"
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.515175 4774 generic.go:334] "Generic (PLEG): container finished" podID="7a3ae90b-73bb-4fbf-887b-c6e432338502" containerID="6703fd3bafe778bc3a2dd40f769dd8e0a9d17470badf7480e9e3ba534863afae" exitCode=0
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.515241 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance128f-account-delete-w4g2r" event={"ID":"7a3ae90b-73bb-4fbf-887b-c6e432338502","Type":"ContainerDied","Data":"6703fd3bafe778bc3a2dd40f769dd8e0a9d17470badf7480e9e3ba534863afae"}
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.516767 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell03102-account-delete-pwxhc" event={"ID":"3311b6bd-a19b-402c-afe4-22222098c669","Type":"ContainerStarted","Data":"8557e9b8b109d70798eeacd9ade5c0706639c8124bf7a205542645d43c181df1"}
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.531329 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e0f5811f-60f6-4820-b981-715448365e52","Type":"ContainerDied","Data":"a5b8ada10b9a77eb9454b6acc14455cdf1878ee9df8d3f23a9b3ecc30a817659"}
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.531376 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5b8ada10b9a77eb9454b6acc14455cdf1878ee9df8d3f23a9b3ecc30a817659"
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.888086 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": read tcp 10.217.0.2:49296->10.217.0.203:8775: read: connection reset by peer"
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.888724 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": read tcp 10.217.0.2:49312->10.217.0.203:8775: read: connection reset by peer"
Nov 21 14:28:18 crc kubenswrapper[4774]: E1121 14:28:18.891035 4774 projected.go:263] Couldn't get secret openstack/swift-proxy-config-data: secret "swift-proxy-config-data" not found
Nov 21 14:28:18 crc kubenswrapper[4774]: E1121 14:28:18.891515 4774 projected.go:263] Couldn't get secret openstack/swift-conf: secret "swift-conf" not found
Nov 21 14:28:18 crc kubenswrapper[4774]: E1121 14:28:18.891525 4774 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Nov 21 14:28:18 crc kubenswrapper[4774]: E1121 14:28:18.891537 4774 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-proxy-748c4cc85c-dkrhb: [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found]
Nov 21 14:28:18 crc kubenswrapper[4774]: E1121 14:28:18.891579 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift podName:4dd9e6d7-d0b1-49f3-920a-34e434835bfa nodeName:}" failed. No retries permitted until 2025-11-21 14:28:22.891561647 +0000 UTC m=+1493.543760906 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift") pod "swift-proxy-748c4cc85c-dkrhb" (UID: "4dd9e6d7-d0b1-49f3-920a-34e434835bfa") : [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found]
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.980892 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-576b48cd9b-wr2q7" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:58262->10.217.0.161:9311: read: connection reset by peer"
Nov 21 14:28:18 crc kubenswrapper[4774]: I1121 14:28:18.981239 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-576b48cd9b-wr2q7" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:58266->10.217.0.161:9311: read: connection reset by peer"
Nov 21 14:28:19 crc kubenswrapper[4774]: E1121 14:28:19.106779 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3ab69fee82d7e8cb78023f292bf75b19ed476e76aa600827e67cc6b39f135018 is running failed: container process not found" containerID="3ab69fee82d7e8cb78023f292bf75b19ed476e76aa600827e67cc6b39f135018" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Nov 21 14:28:19 crc kubenswrapper[4774]: E1121 14:28:19.108239 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3ab69fee82d7e8cb78023f292bf75b19ed476e76aa600827e67cc6b39f135018 is running failed: container process not found" containerID="3ab69fee82d7e8cb78023f292bf75b19ed476e76aa600827e67cc6b39f135018" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Nov 21 14:28:19 crc kubenswrapper[4774]: E1121 14:28:19.108521 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3ab69fee82d7e8cb78023f292bf75b19ed476e76aa600827e67cc6b39f135018 is running failed: container process not found" containerID="3ab69fee82d7e8cb78023f292bf75b19ed476e76aa600827e67cc6b39f135018" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Nov 21 14:28:19 crc kubenswrapper[4774]: E1121 14:28:19.108549 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3ab69fee82d7e8cb78023f292bf75b19ed476e76aa600827e67cc6b39f135018 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="ad456e3b-04a1-48d6-8fbc-39e3faa00aa0" containerName="nova-scheduler-scheduler"
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.535749 4774 scope.go:117] "RemoveContainer" containerID="6c283edbfdba4ea2bc344eb98e2d695dca6f87665150a1916b60b2a1675c71a1"
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.623928 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"75187d0f-77b0-45ee-a452-1850f0fe7851","Type":"ContainerDied","Data":"b7c49052129643c386931d23bcd0eafc2f5eda9ba2208007bc2ea5f89298e0c5"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.623989 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7c49052129643c386931d23bcd0eafc2f5eda9ba2208007bc2ea5f89298e0c5"
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.624193 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.627971 4774 generic.go:334] "Generic (PLEG): container finished" podID="612a4642-7af7-4d93-a27f-e63a0593a511" containerID="581678da81a51c7400fc4c31a5574369fb2025f18fff099bc0af08132f13654f" exitCode=0
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.628157 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"612a4642-7af7-4d93-a27f-e63a0593a511","Type":"ContainerDied","Data":"581678da81a51c7400fc4c31a5574369fb2025f18fff099bc0af08132f13654f"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.631928 4774 generic.go:334] "Generic (PLEG): container finished" podID="7bf981c0-8ff6-493c-a5fc-14610df3b362" containerID="038ee870a823946f43bd1d652272038621a6567ad96155f489796343f86963d7" exitCode=0
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.631984 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7bf981c0-8ff6-493c-a5fc-14610df3b362","Type":"ContainerDied","Data":"038ee870a823946f43bd1d652272038621a6567ad96155f489796343f86963d7"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.632198 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7bf981c0-8ff6-493c-a5fc-14610df3b362","Type":"ContainerDied","Data":"94071897eb8abe21f8bb237106f7040b688a521ef846a129b6e26c177a4810e6"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.632212 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94071897eb8abe21f8bb237106f7040b688a521ef846a129b6e26c177a4810e6"
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.633994 4774 generic.go:334] "Generic (PLEG): container finished" podID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerID="f794e1beb2a4d2e0aa4f9c55c4bf3c19e6f4475d6330263d426714add8939453" exitCode=0
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.634030 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27","Type":"ContainerDied","Data":"f794e1beb2a4d2e0aa4f9c55c4bf3c19e6f4475d6330263d426714add8939453"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.644581 4774 generic.go:334] "Generic (PLEG): container finished" podID="29fd4802-19c7-4e11-b776-c505c03206b0" containerID="08dcf92110aca28bb33e09d2cf80555b027cc58cf28e0ba6099d79517b3e3e96" exitCode=0
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.644653 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"29fd4802-19c7-4e11-b776-c505c03206b0","Type":"ContainerDied","Data":"08dcf92110aca28bb33e09d2cf80555b027cc58cf28e0ba6099d79517b3e3e96"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.664512 4774 scope.go:117] "RemoveContainer" containerID="1d66c74501f2c450d7d07143b74c86af7aad69e0eb9bc0e1631a18ed7cbe8937"
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.687660 4774 generic.go:334] "Generic (PLEG): container finished" podID="ad456e3b-04a1-48d6-8fbc-39e3faa00aa0" containerID="3ab69fee82d7e8cb78023f292bf75b19ed476e76aa600827e67cc6b39f135018" exitCode=0
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.687772 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0","Type":"ContainerDied","Data":"3ab69fee82d7e8cb78023f292bf75b19ed476e76aa600827e67cc6b39f135018"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.697969 4774 generic.go:334] "Generic (PLEG): container finished" podID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerID="1a0a15cfd145eaf76485365a7148d16577e73bc0add7da74f8b15ec9b79a5303" exitCode=0
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.698101 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-576b48cd9b-wr2q7" event={"ID":"1718aee5-94ce-4682-aa62-28843ff1e2ef","Type":"ContainerDied","Data":"1a0a15cfd145eaf76485365a7148d16577e73bc0add7da74f8b15ec9b79a5303"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.712143 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.712572 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="ceilometer-central-agent" containerID="cri-o://a4a27b3c5077e95426b1db0a18c43f2ded3d18629d74ecfdf80ae409e2215348" gracePeriod=30
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.712754 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"531a66a5-f4c9-44f1-83a7-a3e4292fef52","Type":"ContainerDied","Data":"32a8ff46a80cd8b2653390e9bc5dc5bc1ef3f604f02adbd3cea1ddad69a05e2c"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.713002 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32a8ff46a80cd8b2653390e9bc5dc5bc1ef3f604f02adbd3cea1ddad69a05e2c"
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.713132 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="proxy-httpd" containerID="cri-o://b6dead7e6b8a9edca06d5258569ba83c012500d4af7950b97020473ff90fca2b" gracePeriod=30
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.713330 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="ceilometer-notification-agent" containerID="cri-o://7d2c6e460846a332f45e2cd1fd8b4211e1fef71fdaba2c330e61b8c2240fa3a3" gracePeriod=30
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.713413 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="sg-core" containerID="cri-o://ea9ca71244bc7f1b46eba74e6204643a6aa38bccdbe0a89dc25f4da34716f6b9" gracePeriod=30
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.718803 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-combined-ca-bundle\") pod \"e0f5811f-60f6-4820-b981-715448365e52\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") "
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.718892 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-config-data\") pod \"e0f5811f-60f6-4820-b981-715448365e52\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") "
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.718994 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4kfn\" (UniqueName: \"kubernetes.io/projected/e0f5811f-60f6-4820-b981-715448365e52-kube-api-access-x4kfn\") pod \"e0f5811f-60f6-4820-b981-715448365e52\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") "
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.719026 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-vencrypt-tls-certs\") pod \"e0f5811f-60f6-4820-b981-715448365e52\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") "
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.719168 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-nova-novncproxy-tls-certs\") pod \"e0f5811f-60f6-4820-b981-715448365e52\" (UID: \"e0f5811f-60f6-4820-b981-715448365e52\") "
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.766659 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.770447 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="424dddc1-7019-40ab-b405-a2dcaee08c65" containerName="kube-state-metrics" containerID="cri-o://f7e24f67518e454751426c3c5dc72df1fbe276fbaaac5b326b29c0ee877432a8" gracePeriod=30
Nov 21 14:28:19 crc kubenswrapper[4774]: E1121 14:28:19.773939 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.786032 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0f5811f-60f6-4820-b981-715448365e52-kube-api-access-x4kfn" (OuterVolumeSpecName: "kube-api-access-x4kfn") pod "e0f5811f-60f6-4820-b981-715448365e52" (UID: "e0f5811f-60f6-4820-b981-715448365e52"). InnerVolumeSpecName "kube-api-access-x4kfn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:19 crc kubenswrapper[4774]: E1121 14:28:19.791210 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Nov 21 14:28:19 crc kubenswrapper[4774]: E1121 14:28:19.795357 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Nov 21 14:28:19 crc kubenswrapper[4774]: E1121 14:28:19.795498 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerName="ovn-northd"
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.798151 4774 generic.go:334] "Generic (PLEG): container finished" podID="36597581-6c3f-42a7-98ba-155d3bb19320" containerID="c3d2080c4d1517a927737cdfe470200b33ed1dfc064dd0c21a2afa217e1ea935" exitCode=0
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.798294 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"36597581-6c3f-42a7-98ba-155d3bb19320","Type":"ContainerDied","Data":"c3d2080c4d1517a927737cdfe470200b33ed1dfc064dd0c21a2afa217e1ea935"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.798390 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"36597581-6c3f-42a7-98ba-155d3bb19320","Type":"ContainerDied","Data":"def4a4ed1a20cd4df9962234216a7423ad43403aca5dc81f3f3b8c1accd42945"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.798465 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="def4a4ed1a20cd4df9962234216a7423ad43403aca5dc81f3f3b8c1accd42945"
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.800515 4774 generic.go:334] "Generic (PLEG): container finished" podID="204761da-3cd3-4024-8268-2c4ade77be70" containerID="cc1d60dd83d00832b380eb3c950ba9940eb8e75dc9cfe60f03f0990330129de2" exitCode=0
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.800749 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8d9694746-ctlgk" event={"ID":"204761da-3cd3-4024-8268-2c4ade77be70","Type":"ContainerDied","Data":"cc1d60dd83d00832b380eb3c950ba9940eb8e75dc9cfe60f03f0990330129de2"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.800879 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8d9694746-ctlgk" event={"ID":"204761da-3cd3-4024-8268-2c4ade77be70","Type":"ContainerDied","Data":"2cc7c80b89b553cb23dac5a76f39501cb8a1850828a8f174e0938896be6dac43"}
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.800943 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cc7c80b89b553cb23dac5a76f39501cb8a1850828a8f174e0938896be6dac43"
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.824173 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4kfn\" (UniqueName: \"kubernetes.io/projected/e0f5811f-60f6-4820-b981-715448365e52-kube-api-access-x4kfn\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.881467 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"]
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.882113 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/memcached-0" podUID="f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" containerName="memcached" containerID="cri-o://69c2ce68633246110d46b63e32f22397b08e5e0b28d2e21b8332046a0b226d6e" gracePeriod=30
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.983313 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-mpj9v"]
Nov 21 14:28:19 crc kubenswrapper[4774]: I1121 14:28:19.997066 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0f5811f-60f6-4820-b981-715448365e52" (UID: "e0f5811f-60f6-4820-b981-715448365e52"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.029895 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-tjdsz"]
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.031792 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.064924 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-mpj9v"]
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.088365 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "e0f5811f-60f6-4820-b981-715448365e52" (UID: "e0f5811f-60f6-4820-b981-715448365e52"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.090946 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-tjdsz"]
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.123197 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-config-data" (OuterVolumeSpecName: "config-data") pod "e0f5811f-60f6-4820-b981-715448365e52" (UID: "e0f5811f-60f6-4820-b981-715448365e52"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:20 crc kubenswrapper[4774]: E1121 14:28:20.132736 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 464ba226111b0f9cd638d2e2cf2340bbb1479d0450894949cd4f99913bbc9678 is running failed: container process not found" containerID="464ba226111b0f9cd638d2e2cf2340bbb1479d0450894949cd4f99913bbc9678" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.158753 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "e0f5811f-60f6-4820-b981-715448365e52" (UID: "e0f5811f-60f6-4820-b981-715448365e52"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:20 crc kubenswrapper[4774]: E1121 14:28:20.165734 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 464ba226111b0f9cd638d2e2cf2340bbb1479d0450894949cd4f99913bbc9678 is running failed: container process not found" containerID="464ba226111b0f9cd638d2e2cf2340bbb1479d0450894949cd4f99913bbc9678" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 21 14:28:20 crc kubenswrapper[4774]: E1121 14:28:20.172658 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 464ba226111b0f9cd638d2e2cf2340bbb1479d0450894949cd4f99913bbc9678 is running failed: container process not found" containerID="464ba226111b0f9cd638d2e2cf2340bbb1479d0450894949cd4f99913bbc9678" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 21 14:28:20 crc kubenswrapper[4774]: E1121 14:28:20.172743 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 464ba226111b0f9cd638d2e2cf2340bbb1479d0450894949cd4f99913bbc9678 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="81a92903-9f60-4f44-917f-744a2b80a57c" containerName="nova-cell1-conductor-conductor"
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.190851 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf55e528-0d45-4c04-8f50-674d2b40625c" path="/var/lib/kubelet/pods/cf55e528-0d45-4c04-8f50-674d2b40625c/volumes"
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.191566 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df167c70-6bf6-4221-ac4d-fe967e1abaac" path="/var/lib/kubelet/pods/df167c70-6bf6-4221-ac4d-fe967e1abaac/volumes"
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.193575 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-b76744b8b-5ws6g"]
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.193604 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"]
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.193746 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-zjwrf"]
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.193762 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-zjwrf"]
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.199996 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-88ef-account-create-hlvhs"]
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.200546 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/keystone-b76744b8b-5ws6g" podUID="57cdbc4f-20e9-4189-872d-f6f3c58f7093" containerName="keystone-api" containerID="cri-o://aea70590e231f1b48851f4ffa1e6852272819cf991813022fe7bff259b0f4d04" gracePeriod=30
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.205525 4774 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.205630 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.205713 4774 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0f5811f-60f6-4820-b981-715448365e52-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.254197 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-88ef-account-create-hlvhs"]
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.706097 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstack-galera-0" podUID="7030e5d8-2d2b-4cc5-a283-339599595a18" containerName="galera" containerID="cri-o://a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06" gracePeriod=30
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.725181 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.739108 4774 scope.go:117] "RemoveContainer" containerID="146696e90c13f79100e0e356f29802f45ab9194b43414372cba67548f83118f8"
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.760471 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"8b17b723-7e23-4a12-916e-0f2d00b72239\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") "
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.760641 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8b17b723-7e23-4a12-916e-0f2d00b72239-config-data-generated\") pod \"8b17b723-7e23-4a12-916e-0f2d00b72239\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") "
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.760776 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-kolla-config\") pod \"8b17b723-7e23-4a12-916e-0f2d00b72239\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") "
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.760803 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b17b723-7e23-4a12-916e-0f2d00b72239-combined-ca-bundle\") pod \"8b17b723-7e23-4a12-916e-0f2d00b72239\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") "
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.760842 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-operator-scripts\") pod \"8b17b723-7e23-4a12-916e-0f2d00b72239\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") "
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.760865 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6zhg\" (UniqueName: \"kubernetes.io/projected/8b17b723-7e23-4a12-916e-0f2d00b72239-kube-api-access-j6zhg\") pod \"8b17b723-7e23-4a12-916e-0f2d00b72239\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") "
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.761350 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b17b723-7e23-4a12-916e-0f2d00b72239-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "8b17b723-7e23-4a12-916e-0f2d00b72239" (UID: "8b17b723-7e23-4a12-916e-0f2d00b72239"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.761525 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "8b17b723-7e23-4a12-916e-0f2d00b72239" (UID: "8b17b723-7e23-4a12-916e-0f2d00b72239"). InnerVolumeSpecName "kolla-config".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.761550 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-config-data-default\") pod \"8b17b723-7e23-4a12-916e-0f2d00b72239\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.761662 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b17b723-7e23-4a12-916e-0f2d00b72239-galera-tls-certs\") pod \"8b17b723-7e23-4a12-916e-0f2d00b72239\" (UID: \"8b17b723-7e23-4a12-916e-0f2d00b72239\") " Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.762096 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "8b17b723-7e23-4a12-916e-0f2d00b72239" (UID: "8b17b723-7e23-4a12-916e-0f2d00b72239"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.762932 4774 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.762951 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-config-data-default\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.762962 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/8b17b723-7e23-4a12-916e-0f2d00b72239-config-data-generated\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.764477 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8b17b723-7e23-4a12-916e-0f2d00b72239" (UID: "8b17b723-7e23-4a12-916e-0f2d00b72239"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.819604 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b17b723-7e23-4a12-916e-0f2d00b72239-kube-api-access-j6zhg" (OuterVolumeSpecName: "kube-api-access-j6zhg") pod "8b17b723-7e23-4a12-916e-0f2d00b72239" (UID: "8b17b723-7e23-4a12-916e-0f2d00b72239"). InnerVolumeSpecName "kube-api-access-j6zhg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.844292 4774 generic.go:334] "Generic (PLEG): container finished" podID="8057ad05-b8c9-4742-a0e2-388f0a901595" containerID="72732cc3c72816545f8f6bd38e3894a25c914501104260c3cdc2219287bc3e97" exitCode=0 Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.844369 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" event={"ID":"8057ad05-b8c9-4742-a0e2-388f0a901595","Type":"ContainerDied","Data":"72732cc3c72816545f8f6bd38e3894a25c914501104260c3cdc2219287bc3e97"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.849899 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-576b48cd9b-wr2q7" event={"ID":"1718aee5-94ce-4682-aa62-28843ff1e2ef","Type":"ContainerDied","Data":"8f073fc66f16b8911daaf68519635469730b95292fc8f8051fe20535c85a2b4c"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.849947 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f073fc66f16b8911daaf68519635469730b95292fc8f8051fe20535c85a2b4c" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.856951 4774 generic.go:334] "Generic (PLEG): container finished" podID="98c89c8e-6557-46b4-adf8-f954dfff68b3" containerID="123168316f49f1c892f63242e19f929cf760d9f3f3bdcc32a34469541b54b183" exitCode=0 Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.857042 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-74459fb479-fkm77" event={"ID":"98c89c8e-6557-46b4-adf8-f954dfff68b3","Type":"ContainerDied","Data":"123168316f49f1c892f63242e19f929cf760d9f3f3bdcc32a34469541b54b183"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.861067 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27","Type":"ContainerDied","Data":"9336da1bc8d59fe9dd4c3f70da38a1f80e302a29df83f9a6ad5fe0399f289007"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.861105 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9336da1bc8d59fe9dd4c3f70da38a1f80e302a29df83f9a6ad5fe0399f289007" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.864723 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b17b723-7e23-4a12-916e-0f2d00b72239-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.864967 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6zhg\" (UniqueName: \"kubernetes.io/projected/8b17b723-7e23-4a12-916e-0f2d00b72239-kube-api-access-j6zhg\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.866686 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0","Type":"ContainerDied","Data":"ce7bc79cbe4d96ae8efafc1514532f5d9f01a6345051d4f611053214fc2380c1"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.866726 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce7bc79cbe4d96ae8efafc1514532f5d9f01a6345051d4f611053214fc2380c1" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.871809 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "mysql-db") 
pod "8b17b723-7e23-4a12-916e-0f2d00b72239" (UID: "8b17b723-7e23-4a12-916e-0f2d00b72239"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.873700 4774 generic.go:334] "Generic (PLEG): container finished" podID="81a92903-9f60-4f44-917f-744a2b80a57c" containerID="464ba226111b0f9cd638d2e2cf2340bbb1479d0450894949cd4f99913bbc9678" exitCode=0 Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.873774 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"81a92903-9f60-4f44-917f-744a2b80a57c","Type":"ContainerDied","Data":"464ba226111b0f9cd638d2e2cf2340bbb1479d0450894949cd4f99913bbc9678"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.875769 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement2802-account-delete-ltg2g" event={"ID":"2bf5bbb4-9ebb-41b9-a888-4144660d088c","Type":"ContainerDied","Data":"489020b01ad371ae7a8be2e9485117135c9a8683bcad8d1f98ee44c6bb852fca"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.875812 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="489020b01ad371ae7a8be2e9485117135c9a8683bcad8d1f98ee44c6bb852fca" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.878162 4774 generic.go:334] "Generic (PLEG): container finished" podID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerID="b6dead7e6b8a9edca06d5258569ba83c012500d4af7950b97020473ff90fca2b" exitCode=0 Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.878182 4774 generic.go:334] "Generic (PLEG): container finished" podID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerID="ea9ca71244bc7f1b46eba74e6204643a6aa38bccdbe0a89dc25f4da34716f6b9" exitCode=2 Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.878191 4774 generic.go:334] "Generic (PLEG): container finished" podID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerID="a4a27b3c5077e95426b1db0a18c43f2ded3d18629d74ecfdf80ae409e2215348" exitCode=0 Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.878228 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc1f1975-32c8-494c-b6c7-69a72353879f","Type":"ContainerDied","Data":"b6dead7e6b8a9edca06d5258569ba83c012500d4af7950b97020473ff90fca2b"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.878247 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc1f1975-32c8-494c-b6c7-69a72353879f","Type":"ContainerDied","Data":"ea9ca71244bc7f1b46eba74e6204643a6aa38bccdbe0a89dc25f4da34716f6b9"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.878313 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc1f1975-32c8-494c-b6c7-69a72353879f","Type":"ContainerDied","Data":"a4a27b3c5077e95426b1db0a18c43f2ded3d18629d74ecfdf80ae409e2215348"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.880391 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"612a4642-7af7-4d93-a27f-e63a0593a511","Type":"ContainerDied","Data":"48bb40685a8dc40e39ac70ae809f434980afb59629dacaf9d97f1151a65ca685"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.880408 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48bb40685a8dc40e39ac70ae809f434980afb59629dacaf9d97f1151a65ca685" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.883425 4774 generic.go:334] "Generic 
(PLEG): container finished" podID="424dddc1-7019-40ab-b405-a2dcaee08c65" containerID="f7e24f67518e454751426c3c5dc72df1fbe276fbaaac5b326b29c0ee877432a8" exitCode=2 Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.883466 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"424dddc1-7019-40ab-b405-a2dcaee08c65","Type":"ContainerDied","Data":"f7e24f67518e454751426c3c5dc72df1fbe276fbaaac5b326b29c0ee877432a8"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.885385 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.886227 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder5822-account-delete-7mbkj" event={"ID":"f7a1181b-900b-40dc-9855-795653215df3","Type":"ContainerDied","Data":"3ebbc2c15de6153027efa3d91f17c54d934ae0be150e065dda7f690059d8300e"} Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.886252 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ebbc2c15de6153027efa3d91f17c54d934ae0be150e065dda7f690059d8300e" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.886316 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.926507 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b17b723-7e23-4a12-916e-0f2d00b72239-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b17b723-7e23-4a12-916e-0f2d00b72239" (UID: "8b17b723-7e23-4a12-916e-0f2d00b72239"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.983068 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b17b723-7e23-4a12-916e-0f2d00b72239-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:20 crc kubenswrapper[4774]: I1121 14:28:20.983130 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.049997 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b17b723-7e23-4a12-916e-0f2d00b72239-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "8b17b723-7e23-4a12-916e-0f2d00b72239" (UID: "8b17b723-7e23-4a12-916e-0f2d00b72239"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.084684 4774 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b17b723-7e23-4a12-916e-0f2d00b72239-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.090164 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.182227 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-748c4cc85c-dkrhb" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.187488 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.219484 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.255960 4774 scope.go:117] "RemoveContainer" containerID="dfbedbce9f436ba2d67af7321bdcc2d58c19cd6c6808e9ddda88da0a09afd9af" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.264558 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.275605 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.281651 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.291752 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mn56k\" (UniqueName: \"kubernetes.io/projected/75187d0f-77b0-45ee-a452-1850f0fe7851-kube-api-access-mn56k\") pod \"75187d0f-77b0-45ee-a452-1850f0fe7851\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292059 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-config-data\") pod \"75187d0f-77b0-45ee-a452-1850f0fe7851\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292104 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-public-tls-certs\") pod \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292134 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-config-data-custom\") pod \"75187d0f-77b0-45ee-a452-1850f0fe7851\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292155 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75187d0f-77b0-45ee-a452-1850f0fe7851-etc-machine-id\") pod \"75187d0f-77b0-45ee-a452-1850f0fe7851\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292173 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-combined-ca-bundle\") pod \"75187d0f-77b0-45ee-a452-1850f0fe7851\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292211 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift\") pod \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292239 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-log-httpd\") pod \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292262 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-run-httpd\") pod \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292305 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-scripts\") pod \"75187d0f-77b0-45ee-a452-1850f0fe7851\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292342 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-combined-ca-bundle\") pod \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292371 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-internal-tls-certs\") pod \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292394 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-config-data\") pod \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.292418 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlm8b\" (UniqueName: \"kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-kube-api-access-nlm8b\") pod \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\" (UID: \"4dd9e6d7-d0b1-49f3-920a-34e434835bfa\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.304608 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/75187d0f-77b0-45ee-a452-1850f0fe7851-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "75187d0f-77b0-45ee-a452-1850f0fe7851" (UID: "75187d0f-77b0-45ee-a452-1850f0fe7851"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.306395 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8d9694746-ctlgk" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.307656 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.311122 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.329429 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4dd9e6d7-d0b1-49f3-920a-34e434835bfa" (UID: "4dd9e6d7-d0b1-49f3-920a-34e434835bfa"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.330761 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "75187d0f-77b0-45ee-a452-1850f0fe7851" (UID: "75187d0f-77b0-45ee-a452-1850f0fe7851"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.334201 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75187d0f-77b0-45ee-a452-1850f0fe7851-kube-api-access-mn56k" (OuterVolumeSpecName: "kube-api-access-mn56k") pod "75187d0f-77b0-45ee-a452-1850f0fe7851" (UID: "75187d0f-77b0-45ee-a452-1850f0fe7851"). InnerVolumeSpecName "kube-api-access-mn56k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.341991 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4dd9e6d7-d0b1-49f3-920a-34e434835bfa" (UID: "4dd9e6d7-d0b1-49f3-920a-34e434835bfa"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.342124 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.342986 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.347073 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-kube-api-access-nlm8b" (OuterVolumeSpecName: "kube-api-access-nlm8b") pod "4dd9e6d7-d0b1-49f3-920a-34e434835bfa" (UID: "4dd9e6d7-d0b1-49f3-920a-34e434835bfa"). InnerVolumeSpecName "kube-api-access-nlm8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.347256 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6868d89965-nrgpl"] Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.354535 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6868d89965-nrgpl"] Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.356195 4774 scope.go:117] "RemoveContainer" containerID="f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.357446 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "4dd9e6d7-d0b1-49f3-920a-34e434835bfa" (UID: "4dd9e6d7-d0b1-49f3-920a-34e434835bfa"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.359610 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-scripts" (OuterVolumeSpecName: "scripts") pod "75187d0f-77b0-45ee-a452-1850f0fe7851" (UID: "75187d0f-77b0-45ee-a452-1850f0fe7851"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.362084 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.379491 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.386632 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395042 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-combined-ca-bundle\") pod \"204761da-3cd3-4024-8268-2c4ade77be70\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395088 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-internal-tls-certs\") pod \"7bf981c0-8ff6-493c-a5fc-14610df3b362\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395113 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-scripts\") pod \"36597581-6c3f-42a7-98ba-155d3bb19320\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395172 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/204761da-3cd3-4024-8268-2c4ade77be70-logs\") pod \"204761da-3cd3-4024-8268-2c4ade77be70\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395191 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-config-data\") pod \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\" (UID: \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395214 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkfm5\" (UniqueName: \"kubernetes.io/projected/7bf981c0-8ff6-493c-a5fc-14610df3b362-kube-api-access-bkfm5\") pod \"7bf981c0-8ff6-493c-a5fc-14610df3b362\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395236 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"36597581-6c3f-42a7-98ba-155d3bb19320\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395285 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcxpx\" (UniqueName: 
\"kubernetes.io/projected/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-kube-api-access-mcxpx\") pod \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\" (UID: \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395318 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36597581-6c3f-42a7-98ba-155d3bb19320-logs\") pod \"36597581-6c3f-42a7-98ba-155d3bb19320\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395350 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-config-data\") pod \"36597581-6c3f-42a7-98ba-155d3bb19320\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395369 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-public-tls-certs\") pod \"36597581-6c3f-42a7-98ba-155d3bb19320\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395406 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-internal-tls-certs\") pod \"612a4642-7af7-4d93-a27f-e63a0593a511\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395424 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-public-tls-certs\") pod \"612a4642-7af7-4d93-a27f-e63a0593a511\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395455 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bf981c0-8ff6-493c-a5fc-14610df3b362-logs\") pod \"7bf981c0-8ff6-493c-a5fc-14610df3b362\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395457 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.395484 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88thd\" (UniqueName: \"kubernetes.io/projected/36597581-6c3f-42a7-98ba-155d3bb19320-kube-api-access-88thd\") pod \"36597581-6c3f-42a7-98ba-155d3bb19320\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396162 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-combined-ca-bundle\") pod \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\" (UID: \"ad456e3b-04a1-48d6-8fbc-39e3faa00aa0\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396192 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-public-tls-certs\") pod \"7bf981c0-8ff6-493c-a5fc-14610df3b362\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 
14:28:21.396274 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgqqf\" (UniqueName: \"kubernetes.io/projected/204761da-3cd3-4024-8268-2c4ade77be70-kube-api-access-kgqqf\") pod \"204761da-3cd3-4024-8268-2c4ade77be70\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396318 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-internal-tls-certs\") pod \"204761da-3cd3-4024-8268-2c4ade77be70\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396344 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-config-data\") pod \"612a4642-7af7-4d93-a27f-e63a0593a511\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396370 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-combined-ca-bundle\") pod \"36597581-6c3f-42a7-98ba-155d3bb19320\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396415 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-scripts\") pod \"7bf981c0-8ff6-493c-a5fc-14610df3b362\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396472 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-scripts\") pod \"204761da-3cd3-4024-8268-2c4ade77be70\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396494 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-combined-ca-bundle\") pod \"612a4642-7af7-4d93-a27f-e63a0593a511\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396515 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36597581-6c3f-42a7-98ba-155d3bb19320-httpd-run\") pod \"36597581-6c3f-42a7-98ba-155d3bb19320\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396550 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-config-data\") pod \"7bf981c0-8ff6-493c-a5fc-14610df3b362\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396608 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-config-data\") pod \"204761da-3cd3-4024-8268-2c4ade77be70\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396629 4774 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-public-tls-certs\") pod \"204761da-3cd3-4024-8268-2c4ade77be70\" (UID: \"204761da-3cd3-4024-8268-2c4ade77be70\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396651 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-combined-ca-bundle\") pod \"7bf981c0-8ff6-493c-a5fc-14610df3b362\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396680 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/531a66a5-f4c9-44f1-83a7-a3e4292fef52-combined-ca-bundle\") pod \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\" (UID: \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396701 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/531a66a5-f4c9-44f1-83a7-a3e4292fef52-config-data\") pod \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\" (UID: \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396732 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h7xw\" (UniqueName: \"kubernetes.io/projected/531a66a5-f4c9-44f1-83a7-a3e4292fef52-kube-api-access-4h7xw\") pod \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\" (UID: \"531a66a5-f4c9-44f1-83a7-a3e4292fef52\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396780 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612a4642-7af7-4d93-a27f-e63a0593a511-logs\") pod \"612a4642-7af7-4d93-a27f-e63a0593a511\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396807 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klqkd\" (UniqueName: \"kubernetes.io/projected/612a4642-7af7-4d93-a27f-e63a0593a511-kube-api-access-klqkd\") pod \"612a4642-7af7-4d93-a27f-e63a0593a511\" (UID: \"612a4642-7af7-4d93-a27f-e63a0593a511\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396950 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7bf981c0-8ff6-493c-a5fc-14610df3b362-etc-machine-id\") pod \"7bf981c0-8ff6-493c-a5fc-14610df3b362\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.396997 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-config-data-custom\") pod \"7bf981c0-8ff6-493c-a5fc-14610df3b362\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.397896 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mn56k\" (UniqueName: \"kubernetes.io/projected/75187d0f-77b0-45ee-a452-1850f0fe7851-kube-api-access-mn56k\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.397909 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.397920 4774 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75187d0f-77b0-45ee-a452-1850f0fe7851-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.397929 4774 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.397940 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.397950 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.397962 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.397972 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlm8b\" (UniqueName: \"kubernetes.io/projected/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-kube-api-access-nlm8b\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:21 crc kubenswrapper[4774]: E1121 14:28:21.398087 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Nov 21 14:28:21 crc kubenswrapper[4774]: E1121 14:28:21.398152 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data podName:64e33a39-c371-477f-b1c9-d58189db4bc8 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:29.398135594 +0000 UTC m=+1500.050334853 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data") pod "rabbitmq-cell1-server-0" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8") : configmap "rabbitmq-cell1-config-data" not found Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.409160 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bf981c0-8ff6-493c-a5fc-14610df3b362-logs" (OuterVolumeSpecName: "logs") pod "7bf981c0-8ff6-493c-a5fc-14610df3b362" (UID: "7bf981c0-8ff6-493c-a5fc-14610df3b362"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.409555 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/204761da-3cd3-4024-8268-2c4ade77be70-logs" (OuterVolumeSpecName: "logs") pod "204761da-3cd3-4024-8268-2c4ade77be70" (UID: "204761da-3cd3-4024-8268-2c4ade77be70"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.413018 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36597581-6c3f-42a7-98ba-155d3bb19320-logs" (OuterVolumeSpecName: "logs") pod "36597581-6c3f-42a7-98ba-155d3bb19320" (UID: "36597581-6c3f-42a7-98ba-155d3bb19320"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.428540 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.430304 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36597581-6c3f-42a7-98ba-155d3bb19320-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "36597581-6c3f-42a7-98ba-155d3bb19320" (UID: "36597581-6c3f-42a7-98ba-155d3bb19320"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.441759 4774 scope.go:117] "RemoveContainer" containerID="ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.444494 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/612a4642-7af7-4d93-a27f-e63a0593a511-logs" (OuterVolumeSpecName: "logs") pod "612a4642-7af7-4d93-a27f-e63a0593a511" (UID: "612a4642-7af7-4d93-a27f-e63a0593a511"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.445963 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-576b48cd9b-wr2q7" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.446528 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7bf981c0-8ff6-493c-a5fc-14610df3b362-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7bf981c0-8ff6-493c-a5fc-14610df3b362" (UID: "7bf981c0-8ff6-493c-a5fc-14610df3b362"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.450157 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement2802-account-delete-ltg2g" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.499470 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-public-tls-certs\") pod \"1718aee5-94ce-4682-aa62-28843ff1e2ef\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.499509 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-config-data\") pod \"1718aee5-94ce-4682-aa62-28843ff1e2ef\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.499593 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-config-data\") pod \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.499618 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-nova-metadata-tls-certs\") pod \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.499700 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s47cc\" (UniqueName: \"kubernetes.io/projected/2bf5bbb4-9ebb-41b9-a888-4144660d088c-kube-api-access-s47cc\") pod \"2bf5bbb4-9ebb-41b9-a888-4144660d088c\" (UID: \"2bf5bbb4-9ebb-41b9-a888-4144660d088c\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.499723 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-combined-ca-bundle\") pod \"1718aee5-94ce-4682-aa62-28843ff1e2ef\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.499802 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7xt4\" (UniqueName: \"kubernetes.io/projected/1718aee5-94ce-4682-aa62-28843ff1e2ef-kube-api-access-b7xt4\") pod \"1718aee5-94ce-4682-aa62-28843ff1e2ef\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") " Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.508315 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-scripts" (OuterVolumeSpecName: "scripts") pod "36597581-6c3f-42a7-98ba-155d3bb19320" (UID: "36597581-6c3f-42a7-98ba-155d3bb19320"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.510502 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder5822-account-delete-7mbkj"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.521081 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-logs\") pod \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.521164 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-internal-tls-certs\") pod \"1718aee5-94ce-4682-aa62-28843ff1e2ef\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.521295 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-config-data-custom\") pod \"1718aee5-94ce-4682-aa62-28843ff1e2ef\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.521348 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1718aee5-94ce-4682-aa62-28843ff1e2ef-logs\") pod \"1718aee5-94ce-4682-aa62-28843ff1e2ef\" (UID: \"1718aee5-94ce-4682-aa62-28843ff1e2ef\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.521398 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bf5bbb4-9ebb-41b9-a888-4144660d088c-operator-scripts\") pod \"2bf5bbb4-9ebb-41b9-a888-4144660d088c\" (UID: \"2bf5bbb4-9ebb-41b9-a888-4144660d088c\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.521438 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-combined-ca-bundle\") pod \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.521559 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-scripts\") pod \"36597581-6c3f-42a7-98ba-155d3bb19320\" (UID: \"36597581-6c3f-42a7-98ba-155d3bb19320\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.521587 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqmtn\" (UniqueName: \"kubernetes.io/projected/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-kube-api-access-kqmtn\") pod \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\" (UID: \"4ae7ffea-af5d-4804-84cf-fa3c5edfbd27\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.522597 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7bf981c0-8ff6-493c-a5fc-14610df3b362-logs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.522618 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36597581-6c3f-42a7-98ba-155d3bb19320-httpd-run\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.522631 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/612a4642-7af7-4d93-a27f-e63a0593a511-logs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.522642 4774 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7bf981c0-8ff6-493c-a5fc-14610df3b362-etc-machine-id\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.522657 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/204761da-3cd3-4024-8268-2c4ade77be70-logs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.522671 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36597581-6c3f-42a7-98ba-155d3bb19320-logs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.528700 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1718aee5-94ce-4682-aa62-28843ff1e2ef-logs" (OuterVolumeSpecName: "logs") pod "1718aee5-94ce-4682-aa62-28843ff1e2ef" (UID: "1718aee5-94ce-4682-aa62-28843ff1e2ef"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.533033 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-logs" (OuterVolumeSpecName: "logs") pod "4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" (UID: "4ae7ffea-af5d-4804-84cf-fa3c5edfbd27"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.533461 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance128f-account-delete-w4g2r"
Nov 21 14:28:21 crc kubenswrapper[4774]: W1121 14:28:21.533914 4774 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/36597581-6c3f-42a7-98ba-155d3bb19320/volumes/kubernetes.io~secret/scripts
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.533922 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-scripts" (OuterVolumeSpecName: "scripts") pod "36597581-6c3f-42a7-98ba-155d3bb19320" (UID: "36597581-6c3f-42a7-98ba-155d3bb19320"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.534186 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "36597581-6c3f-42a7-98ba-155d3bb19320" (UID: "36597581-6c3f-42a7-98ba-155d3bb19320"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.549008 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bf5bbb4-9ebb-41b9-a888-4144660d088c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2bf5bbb4-9ebb-41b9-a888-4144660d088c" (UID: "2bf5bbb4-9ebb-41b9-a888-4144660d088c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.561879 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.563114 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.565101 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-scripts" (OuterVolumeSpecName: "scripts") pod "7bf981c0-8ff6-493c-a5fc-14610df3b362" (UID: "7bf981c0-8ff6-493c-a5fc-14610df3b362"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.565254 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7bf981c0-8ff6-493c-a5fc-14610df3b362" (UID: "7bf981c0-8ff6-493c-a5fc-14610df3b362"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.582905 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36597581-6c3f-42a7-98ba-155d3bb19320-kube-api-access-88thd" (OuterVolumeSpecName: "kube-api-access-88thd") pod "36597581-6c3f-42a7-98ba-155d3bb19320" (UID: "36597581-6c3f-42a7-98ba-155d3bb19320"). InnerVolumeSpecName "kube-api-access-88thd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.584534 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bf981c0-8ff6-493c-a5fc-14610df3b362-kube-api-access-bkfm5" (OuterVolumeSpecName: "kube-api-access-bkfm5") pod "7bf981c0-8ff6-493c-a5fc-14610df3b362" (UID: "7bf981c0-8ff6-493c-a5fc-14610df3b362"). InnerVolumeSpecName "kube-api-access-bkfm5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.587332 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-scripts" (OuterVolumeSpecName: "scripts") pod "204761da-3cd3-4024-8268-2c4ade77be70" (UID: "204761da-3cd3-4024-8268-2c4ade77be70"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.588026 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/531a66a5-f4c9-44f1-83a7-a3e4292fef52-kube-api-access-4h7xw" (OuterVolumeSpecName: "kube-api-access-4h7xw") pod "531a66a5-f4c9-44f1-83a7-a3e4292fef52" (UID: "531a66a5-f4c9-44f1-83a7-a3e4292fef52"). InnerVolumeSpecName "kube-api-access-4h7xw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.592048 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-kube-api-access-mcxpx" (OuterVolumeSpecName: "kube-api-access-mcxpx") pod "ad456e3b-04a1-48d6-8fbc-39e3faa00aa0" (UID: "ad456e3b-04a1-48d6-8fbc-39e3faa00aa0"). InnerVolumeSpecName "kube-api-access-mcxpx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.592796 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/204761da-3cd3-4024-8268-2c4ade77be70-kube-api-access-kgqqf" (OuterVolumeSpecName: "kube-api-access-kgqqf") pod "204761da-3cd3-4024-8268-2c4ade77be70" (UID: "204761da-3cd3-4024-8268-2c4ade77be70"). InnerVolumeSpecName "kube-api-access-kgqqf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.593498 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/612a4642-7af7-4d93-a27f-e63a0593a511-kube-api-access-klqkd" (OuterVolumeSpecName: "kube-api-access-klqkd") pod "612a4642-7af7-4d93-a27f-e63a0593a511" (UID: "612a4642-7af7-4d93-a27f-e63a0593a511"). InnerVolumeSpecName "kube-api-access-klqkd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.593531 4774 scope.go:117] "RemoveContainer" containerID="f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79"
Nov 21 14:28:21 crc kubenswrapper[4774]: E1121 14:28:21.594431 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79\": container with ID starting with f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79 not found: ID does not exist" containerID="f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.594465 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79"} err="failed to get container status \"f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79\": rpc error: code = NotFound desc = could not find container \"f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79\": container with ID starting with f8ca38459c422314641e0cbd7d565f567766233170bf6902b75b68ce3400ee79 not found: ID does not exist"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.594494 4774 scope.go:117] "RemoveContainer" containerID="ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2"
Nov 21 14:28:21 crc kubenswrapper[4774]: E1121 14:28:21.596278 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2\": container with ID starting with ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2 not found: ID does not exist" containerID="ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.596300 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2"} err="failed to get container status \"ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2\": rpc error: code = NotFound desc = could not find container \"ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2\": container with ID starting with ae1b1656e607fb5369896d3582e6a47f822d350694dd5339b4745def70a206a2 not found: ID does not exist"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.596316 4774 scope.go:117] "RemoveContainer" containerID="5835b1e71040c97609879beff0fc752dee4bbaaacdb26af845a02a7a42242f5c"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.596569 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1718aee5-94ce-4682-aa62-28843ff1e2ef-kube-api-access-b7xt4" (OuterVolumeSpecName: "kube-api-access-b7xt4") pod "1718aee5-94ce-4682-aa62-28843ff1e2ef" (UID: "1718aee5-94ce-4682-aa62-28843ff1e2ef"). InnerVolumeSpecName "kube-api-access-b7xt4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.605545 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-kube-api-access-kqmtn" (OuterVolumeSpecName: "kube-api-access-kqmtn") pod "4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" (UID: "4ae7ffea-af5d-4804-84cf-fa3c5edfbd27"). InnerVolumeSpecName "kube-api-access-kqmtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.609147 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bf5bbb4-9ebb-41b9-a888-4144660d088c-kube-api-access-s47cc" (OuterVolumeSpecName: "kube-api-access-s47cc") pod "2bf5bbb4-9ebb-41b9-a888-4144660d088c" (UID: "2bf5bbb4-9ebb-41b9-a888-4144660d088c"). InnerVolumeSpecName "kube-api-access-s47cc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.611071 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.620069 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1718aee5-94ce-4682-aa62-28843ff1e2ef" (UID: "1718aee5-94ce-4682-aa62-28843ff1e2ef"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.624013 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a3ae90b-73bb-4fbf-887b-c6e432338502-operator-scripts\") pod \"7a3ae90b-73bb-4fbf-887b-c6e432338502\" (UID: \"7a3ae90b-73bb-4fbf-887b-c6e432338502\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.624099 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mc44z\" (UniqueName: \"kubernetes.io/projected/7a3ae90b-73bb-4fbf-887b-c6e432338502-kube-api-access-mc44z\") pod \"7a3ae90b-73bb-4fbf-887b-c6e432338502\" (UID: \"7a3ae90b-73bb-4fbf-887b-c6e432338502\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.624138 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-combined-ca-bundle\") pod \"29fd4802-19c7-4e11-b776-c505c03206b0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.624206 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9w9g\" (UniqueName: \"kubernetes.io/projected/81a92903-9f60-4f44-917f-744a2b80a57c-kube-api-access-r9w9g\") pod \"81a92903-9f60-4f44-917f-744a2b80a57c\" (UID: \"81a92903-9f60-4f44-917f-744a2b80a57c\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.624255 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81a92903-9f60-4f44-917f-744a2b80a57c-config-data\") pod \"81a92903-9f60-4f44-917f-744a2b80a57c\" (UID: \"81a92903-9f60-4f44-917f-744a2b80a57c\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.624419 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-config-data\") pod \"29fd4802-19c7-4e11-b776-c505c03206b0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.624456 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pg5z\" (UniqueName: \"kubernetes.io/projected/29fd4802-19c7-4e11-b776-c505c03206b0-kube-api-access-2pg5z\") pod \"29fd4802-19c7-4e11-b776-c505c03206b0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.624635 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"29fd4802-19c7-4e11-b776-c505c03206b0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.624698 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7a1181b-900b-40dc-9855-795653215df3-operator-scripts\") pod \"f7a1181b-900b-40dc-9855-795653215df3\" (UID: \"f7a1181b-900b-40dc-9855-795653215df3\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.624790 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-scripts\") pod \"29fd4802-19c7-4e11-b776-c505c03206b0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.643995 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7a1181b-900b-40dc-9855-795653215df3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f7a1181b-900b-40dc-9855-795653215df3" (UID: "f7a1181b-900b-40dc-9855-795653215df3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.649235 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29fd4802-19c7-4e11-b776-c505c03206b0-logs\") pod \"29fd4802-19c7-4e11-b776-c505c03206b0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.649302 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdgw5\" (UniqueName: \"kubernetes.io/projected/f7a1181b-900b-40dc-9855-795653215df3-kube-api-access-kdgw5\") pod \"f7a1181b-900b-40dc-9855-795653215df3\" (UID: \"f7a1181b-900b-40dc-9855-795653215df3\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.649437 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-internal-tls-certs\") pod \"29fd4802-19c7-4e11-b776-c505c03206b0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.649465 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81a92903-9f60-4f44-917f-744a2b80a57c-combined-ca-bundle\") pod \"81a92903-9f60-4f44-917f-744a2b80a57c\" (UID: \"81a92903-9f60-4f44-917f-744a2b80a57c\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.649585 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/29fd4802-19c7-4e11-b776-c505c03206b0-httpd-run\") pod \"29fd4802-19c7-4e11-b776-c505c03206b0\" (UID: \"29fd4802-19c7-4e11-b776-c505c03206b0\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.658620 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.667209 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-74459fb479-fkm77"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.674568 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a3ae90b-73bb-4fbf-887b-c6e432338502-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7a3ae90b-73bb-4fbf-887b-c6e432338502" (UID: "7a3ae90b-73bb-4fbf-887b-c6e432338502"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.708511 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29fd4802-19c7-4e11-b776-c505c03206b0-logs" (OuterVolumeSpecName: "logs") pod "29fd4802-19c7-4e11-b776-c505c03206b0" (UID: "29fd4802-19c7-4e11-b776-c505c03206b0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.715351 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/29fd4802-19c7-4e11-b776-c505c03206b0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "29fd4802-19c7-4e11-b776-c505c03206b0" (UID: "29fd4802-19c7-4e11-b776-c505c03206b0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.722905 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88thd\" (UniqueName: \"kubernetes.io/projected/36597581-6c3f-42a7-98ba-155d3bb19320-kube-api-access-88thd\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.722955 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgqqf\" (UniqueName: \"kubernetes.io/projected/204761da-3cd3-4024-8268-2c4ade77be70-kube-api-access-kgqqf\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.722967 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s47cc\" (UniqueName: \"kubernetes.io/projected/2bf5bbb4-9ebb-41b9-a888-4144660d088c-kube-api-access-s47cc\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.722979 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7xt4\" (UniqueName: \"kubernetes.io/projected/1718aee5-94ce-4682-aa62-28843ff1e2ef-kube-api-access-b7xt4\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.722991 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723001 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-logs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723009 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723020 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f7a1181b-900b-40dc-9855-795653215df3-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723030 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29fd4802-19c7-4e11-b776-c505c03206b0-logs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723044 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-config-data-custom\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723057 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1718aee5-94ce-4682-aa62-28843ff1e2ef-logs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723067 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bf5bbb4-9ebb-41b9-a888-4144660d088c-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723076 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h7xw\" (UniqueName: \"kubernetes.io/projected/531a66a5-f4c9-44f1-83a7-a3e4292fef52-kube-api-access-4h7xw\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723086 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klqkd\" (UniqueName: \"kubernetes.io/projected/612a4642-7af7-4d93-a27f-e63a0593a511-kube-api-access-klqkd\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723097 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-config-data-custom\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723106 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723117 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqmtn\" (UniqueName: \"kubernetes.io/projected/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-kube-api-access-kqmtn\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723147 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723160 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkfm5\" (UniqueName: \"kubernetes.io/projected/7bf981c0-8ff6-493c-a5fc-14610df3b362-kube-api-access-bkfm5\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723171 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/29fd4802-19c7-4e11-b776-c505c03206b0-httpd-run\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723180 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a3ae90b-73bb-4fbf-887b-c6e432338502-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.723190 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcxpx\" (UniqueName: \"kubernetes.io/projected/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-kube-api-access-mcxpx\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.744554 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.772122 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7a1181b-900b-40dc-9855-795653215df3-kube-api-access-kdgw5" (OuterVolumeSpecName: "kube-api-access-kdgw5") pod "f7a1181b-900b-40dc-9855-795653215df3" (UID: "f7a1181b-900b-40dc-9855-795653215df3"). InnerVolumeSpecName "kube-api-access-kdgw5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.773037 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "29fd4802-19c7-4e11-b776-c505c03206b0" (UID: "29fd4802-19c7-4e11-b776-c505c03206b0"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.773126 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a3ae90b-73bb-4fbf-887b-c6e432338502-kube-api-access-mc44z" (OuterVolumeSpecName: "kube-api-access-mc44z") pod "7a3ae90b-73bb-4fbf-887b-c6e432338502" (UID: "7a3ae90b-73bb-4fbf-887b-c6e432338502"). InnerVolumeSpecName "kube-api-access-mc44z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.779049 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29fd4802-19c7-4e11-b776-c505c03206b0-kube-api-access-2pg5z" (OuterVolumeSpecName: "kube-api-access-2pg5z") pod "29fd4802-19c7-4e11-b776-c505c03206b0" (UID: "29fd4802-19c7-4e11-b776-c505c03206b0"). InnerVolumeSpecName "kube-api-access-2pg5z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.782609 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81a92903-9f60-4f44-917f-744a2b80a57c-kube-api-access-r9w9g" (OuterVolumeSpecName: "kube-api-access-r9w9g") pod "81a92903-9f60-4f44-917f-744a2b80a57c" (UID: "81a92903-9f60-4f44-917f-744a2b80a57c"). InnerVolumeSpecName "kube-api-access-r9w9g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.786628 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-scripts" (OuterVolumeSpecName: "scripts") pod "29fd4802-19c7-4e11-b776-c505c03206b0" (UID: "29fd4802-19c7-4e11-b776-c505c03206b0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.787551 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.825560 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-state-metrics-tls-certs\") pod \"424dddc1-7019-40ab-b405-a2dcaee08c65\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.825640 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-state-metrics-tls-config\") pod \"424dddc1-7019-40ab-b405-a2dcaee08c65\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.825678 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tx4w9\" (UniqueName: \"kubernetes.io/projected/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-api-access-tx4w9\") pod \"424dddc1-7019-40ab-b405-a2dcaee08c65\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.825757 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-672gc\" (UniqueName: \"kubernetes.io/projected/98c89c8e-6557-46b4-adf8-f954dfff68b3-kube-api-access-672gc\") pod \"98c89c8e-6557-46b4-adf8-f954dfff68b3\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.825860 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nm7h7\" (UniqueName: \"kubernetes.io/projected/8057ad05-b8c9-4742-a0e2-388f0a901595-kube-api-access-nm7h7\") pod \"8057ad05-b8c9-4742-a0e2-388f0a901595\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.825881 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-config-data\") pod \"98c89c8e-6557-46b4-adf8-f954dfff68b3\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.825950 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-combined-ca-bundle\") pod \"8057ad05-b8c9-4742-a0e2-388f0a901595\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.826002 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-config-data\") pod \"8057ad05-b8c9-4742-a0e2-388f0a901595\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.826060 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98c89c8e-6557-46b4-adf8-f954dfff68b3-logs\") pod \"98c89c8e-6557-46b4-adf8-f954dfff68b3\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.826144 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8057ad05-b8c9-4742-a0e2-388f0a901595-logs\") pod \"8057ad05-b8c9-4742-a0e2-388f0a901595\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.826212 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-config-data-custom\") pod \"98c89c8e-6557-46b4-adf8-f954dfff68b3\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.826282 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-combined-ca-bundle\") pod \"424dddc1-7019-40ab-b405-a2dcaee08c65\" (UID: \"424dddc1-7019-40ab-b405-a2dcaee08c65\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.826345 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-combined-ca-bundle\") pod \"98c89c8e-6557-46b4-adf8-f954dfff68b3\" (UID: \"98c89c8e-6557-46b4-adf8-f954dfff68b3\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.826453 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-config-data-custom\") pod \"8057ad05-b8c9-4742-a0e2-388f0a901595\" (UID: \"8057ad05-b8c9-4742-a0e2-388f0a901595\") "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.827125 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.827154 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdgw5\" (UniqueName: \"kubernetes.io/projected/f7a1181b-900b-40dc-9855-795653215df3-kube-api-access-kdgw5\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.827170 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mc44z\" (UniqueName: \"kubernetes.io/projected/7a3ae90b-73bb-4fbf-887b-c6e432338502-kube-api-access-mc44z\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.827185 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9w9g\" (UniqueName: \"kubernetes.io/projected/81a92903-9f60-4f44-917f-744a2b80a57c-kube-api-access-r9w9g\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.827201 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pg5z\" (UniqueName: \"kubernetes.io/projected/29fd4802-19c7-4e11-b776-c505c03206b0-kube-api-access-2pg5z\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.827236 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" "
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.851081 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98c89c8e-6557-46b4-adf8-f954dfff68b3-logs" (OuterVolumeSpecName: "logs") pod "98c89c8e-6557-46b4-adf8-f954dfff68b3" (UID: "98c89c8e-6557-46b4-adf8-f954dfff68b3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.851928 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8057ad05-b8c9-4742-a0e2-388f0a901595-logs" (OuterVolumeSpecName: "logs") pod "8057ad05-b8c9-4742-a0e2-388f0a901595" (UID: "8057ad05-b8c9-4742-a0e2-388f0a901595"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.932316 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8057ad05-b8c9-4742-a0e2-388f0a901595-kube-api-access-nm7h7" (OuterVolumeSpecName: "kube-api-access-nm7h7") pod "8057ad05-b8c9-4742-a0e2-388f0a901595" (UID: "8057ad05-b8c9-4742-a0e2-388f0a901595"). InnerVolumeSpecName "kube-api-access-nm7h7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.941314 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi242e-account-delete-tbszv" event={"ID":"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad","Type":"ContainerStarted","Data":"5727b499d39a732f7374e6f309fd71b0123554ed2209b841a589743c7d151fb7"}
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.942704 4774 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novaapi242e-account-delete-tbszv" secret="" err="secret \"galera-openstack-dockercfg-4stkb\" not found"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.949139 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-api-access-tx4w9" (OuterVolumeSpecName: "kube-api-access-tx4w9") pod "424dddc1-7019-40ab-b405-a2dcaee08c65" (UID: "424dddc1-7019-40ab-b405-a2dcaee08c65"). InnerVolumeSpecName "kube-api-access-tx4w9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.951082 4774 generic.go:334] "Generic (PLEG): container finished" podID="f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" containerID="69c2ce68633246110d46b63e32f22397b08e5e0b28d2e21b8332046a0b226d6e" exitCode=0
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.951195 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e","Type":"ContainerDied","Data":"69c2ce68633246110d46b63e32f22397b08e5e0b28d2e21b8332046a0b226d6e"}
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.952493 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98c89c8e-6557-46b4-adf8-f954dfff68b3-kube-api-access-672gc" (OuterVolumeSpecName: "kube-api-access-672gc") pod "98c89c8e-6557-46b4-adf8-f954dfff68b3" (UID: "98c89c8e-6557-46b4-adf8-f954dfff68b3"). InnerVolumeSpecName "kube-api-access-672gc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.958541 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/98c89c8e-6557-46b4-adf8-f954dfff68b3-logs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.958612 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8057ad05-b8c9-4742-a0e2-388f0a901595-logs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.958631 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nm7h7\" (UniqueName: \"kubernetes.io/projected/8057ad05-b8c9-4742-a0e2-388f0a901595-kube-api-access-nm7h7\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.971834 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-74459fb479-fkm77"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.972625 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-74459fb479-fkm77" event={"ID":"98c89c8e-6557-46b4-adf8-f954dfff68b3","Type":"ContainerDied","Data":"383b29ac79abfe6531b39a87f9c64791fd5e6b3324ba0b9a5a53efeae744babc"}
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.972743 4774 scope.go:117] "RemoveContainer" containerID="123168316f49f1c892f63242e19f929cf760d9f3f3bdcc32a34469541b54b183"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.981568 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6523-account-delete-t2fnw" event={"ID":"79476096-5d34-4e8a-9f33-3127bacf4e60","Type":"ContainerStarted","Data":"78430813703b5574a2bac69e33958064713d4be7bef77c6035c099968ca709b0"}
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.988806 4774 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/barbican6523-account-delete-t2fnw" secret="" err="secret \"galera-openstack-dockercfg-4stkb\" not found"
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.990562 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "98c89c8e-6557-46b4-adf8-f954dfff68b3" (UID: "98c89c8e-6557-46b4-adf8-f954dfff68b3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:21 crc kubenswrapper[4774]: I1121 14:28:21.996785 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8057ad05-b8c9-4742-a0e2-388f0a901595" (UID: "8057ad05-b8c9-4742-a0e2-388f0a901595"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.008911 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance128f-account-delete-w4g2r" event={"ID":"7a3ae90b-73bb-4fbf-887b-c6e432338502","Type":"ContainerDied","Data":"7e25a5af88034a826c29c8882746e3e52535762b02564e39400aa794644d46f9"}
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.008968 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e25a5af88034a826c29c8882746e3e52535762b02564e39400aa794644d46f9"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.009054 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance128f-account-delete-w4g2r"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.057092 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"29fd4802-19c7-4e11-b776-c505c03206b0","Type":"ContainerDied","Data":"d41c483404d04ea1443410bd3c7d1ec1118de9dd5dec53ef7c7720dc6b26add8"}
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.058781 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.065865 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.065914 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.065949 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts podName:79476096-5d34-4e8a-9f33-3127bacf4e60 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:22.565918503 +0000 UTC m=+1493.218117762 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts") pod "barbican6523-account-delete-t2fnw" (UID: "79476096-5d34-4e8a-9f33-3127bacf4e60") : configmap "openstack-scripts" not found
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.066093 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts podName:58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad nodeName:}" failed. No retries permitted until 2025-11-21 14:28:22.566062968 +0000 UTC m=+1493.218262227 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts") pod "novaapi242e-account-delete-tbszv" (UID: "58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad") : configmap "openstack-scripts" not found
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.075459 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-672gc\" (UniqueName: \"kubernetes.io/projected/98c89c8e-6557-46b4-adf8-f954dfff68b3-kube-api-access-672gc\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.075508 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-config-data-custom\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.075523 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-config-data-custom\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.075535 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tx4w9\" (UniqueName: \"kubernetes.io/projected/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-api-access-tx4w9\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.090538 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/531a66a5-f4c9-44f1-83a7-a3e4292fef52-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "531a66a5-f4c9-44f1-83a7-a3e4292fef52" (UID: "531a66a5-f4c9-44f1-83a7-a3e4292fef52"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.178927 4774 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novacell03102-account-delete-pwxhc" secret="" err="secret \"galera-openstack-dockercfg-4stkb\" not found"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.198131 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/531a66a5-f4c9-44f1-83a7-a3e4292fef52-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.214375 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novaapi242e-account-delete-tbszv" podStartSLOduration=9.214350651 podStartE2EDuration="9.214350651s" podCreationTimestamp="2025-11-21 14:28:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:28:22.206348998 +0000 UTC m=+1492.858548257" watchObservedRunningTime="2025-11-21 14:28:22.214350651 +0000 UTC m=+1492.866549910"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.218382 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.224964 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0563658a-f1e8-4cae-b165-9697c4673895" path="/var/lib/kubelet/pods/0563658a-f1e8-4cae-b165-9697c4673895/volumes"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.225976 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71c4ef14-3bfc-4cb6-806a-a864a67fdf7e" path="/var/lib/kubelet/pods/71c4ef14-3bfc-4cb6-806a-a864a67fdf7e/volumes"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.226624 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b17b723-7e23-4a12-916e-0f2d00b72239" path="/var/lib/kubelet/pods/8b17b723-7e23-4a12-916e-0f2d00b72239/volumes"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.228047 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db7f3cb4-269e-443e-836e-caae1c2d122f" path="/var/lib/kubelet/pods/db7f3cb4-269e-443e-836e-caae1c2d122f/volumes"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.238874 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0447367-db69-44ff-8077-29ac2c200dbf" path="/var/lib/kubelet/pods/e0447367-db69-44ff-8077-29ac2c200dbf/volumes"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.240566 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0f5811f-60f6-4820-b981-715448365e52" path="/var/lib/kubelet/pods/e0f5811f-60f6-4820-b981-715448365e52/volumes"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.241619 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f89a7785-0a49-4c28-a587-ec113d2f3635" path="/var/lib/kubelet/pods/f89a7785-0a49-4c28-a587-ec113d2f3635/volumes"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.251541 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.270748 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican6523-account-delete-t2fnw" podStartSLOduration=9.270728515 podStartE2EDuration="9.270728515s" podCreationTimestamp="2025-11-21 14:28:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:28:22.256529041 +0000 UTC m=+1492.908728300" watchObservedRunningTime="2025-11-21 14:28:22.270728515 +0000 UTC m=+1492.922927774"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.300741 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/novacell03102-account-delete-pwxhc" podStartSLOduration=9.300711609 podStartE2EDuration="9.300711609s" podCreationTimestamp="2025-11-21 14:28:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 14:28:22.286939847 +0000 UTC m=+1492.939139106" watchObservedRunningTime="2025-11-21 14:28:22.300711609 +0000 UTC m=+1492.952910868"
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.302907 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.302985 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3311b6bd-a19b-402c-afe4-22222098c669-operator-scripts podName:3311b6bd-a19b-402c-afe4-22222098c669 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:22.802964975 +0000 UTC m=+1493.455164234 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/3311b6bd-a19b-402c-afe4-22222098c669-operator-scripts") pod "novacell03102-account-delete-pwxhc" (UID: "3311b6bd-a19b-402c-afe4-22222098c669") : configmap "openstack-scripts" not found
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.303231 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.303308 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.303433 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.303485 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder5822-account-delete-7mbkj"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.303531 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.303569 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement2802-account-delete-ltg2g"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.303682 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.304140 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8d9694746-ctlgk"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.304219 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.304261 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.304296 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.304319 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-748c4cc85c-dkrhb"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.307968 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-config-data" (OuterVolumeSpecName: "config-data") pod "4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" (UID: "4ae7ffea-af5d-4804-84cf-fa3c5edfbd27"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.308514 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-576b48cd9b-wr2q7"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.316045 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "424dddc1-7019-40ab-b405-a2dcaee08c65" (UID: "424dddc1-7019-40ab-b405-a2dcaee08c65"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.367548 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.377174 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.378529 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.379635 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"]
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.379667 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.404345 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "424dddc1-7019-40ab-b405-a2dcaee08c65" (UID: "424dddc1-7019-40ab-b405-a2dcaee08c65"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.406029 4774 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.406050 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.406066 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.412943 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.428271 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"]
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.428396 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovs-vswitchd"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.445086 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81a92903-9f60-4f44-917f-744a2b80a57c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81a92903-9f60-4f44-917f-744a2b80a57c" (UID: "81a92903-9f60-4f44-917f-744a2b80a57c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.454835 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc"
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.457993 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "98c89c8e-6557-46b4-adf8-f954dfff68b3" (UID: "98c89c8e-6557-46b4-adf8-f954dfff68b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.459619 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-config-data" (OuterVolumeSpecName: "config-data") pod "612a4642-7af7-4d93-a27f-e63a0593a511" (UID: "612a4642-7af7-4d93-a27f-e63a0593a511"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.466686 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1718aee5-94ce-4682-aa62-28843ff1e2ef" (UID: "1718aee5-94ce-4682-aa62-28843ff1e2ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.491729 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad456e3b-04a1-48d6-8fbc-39e3faa00aa0" (UID: "ad456e3b-04a1-48d6-8fbc-39e3faa00aa0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.507272 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7bf981c0-8ff6-493c-a5fc-14610df3b362" (UID: "7bf981c0-8ff6-493c-a5fc-14610df3b362"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.507679 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-combined-ca-bundle\") pod \"7bf981c0-8ff6-493c-a5fc-14610df3b362\" (UID: \"7bf981c0-8ff6-493c-a5fc-14610df3b362\") "
Nov 21 14:28:22 crc kubenswrapper[4774]: W1121 14:28:22.507839 4774 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/7bf981c0-8ff6-493c-a5fc-14610df3b362/volumes/kubernetes.io~secret/combined-ca-bundle
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.507855 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7bf981c0-8ff6-493c-a5fc-14610df3b362" (UID: "7bf981c0-8ff6-493c-a5fc-14610df3b362"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.508854 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found
Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.509065 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data podName:e2685b76-2150-4209-a55b-a989ae40b7db nodeName:}" failed. No retries permitted until 2025-11-21 14:28:30.50897455 +0000 UTC m=+1501.161173879 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data") pod "rabbitmq-server-0" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db") : configmap "rabbitmq-config-data" not found
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.509161 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.509191 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.509210 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.509223 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.509238 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.509251 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.509263 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81a92903-9f60-4f44-917f-744a2b80a57c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.583174 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81a92903-9f60-4f44-917f-744a2b80a57c-config-data" (OuterVolumeSpecName: "config-data") pod "81a92903-9f60-4f44-917f-744a2b80a57c" (UID: "81a92903-9f60-4f44-917f-744a2b80a57c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.584578 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36597581-6c3f-42a7-98ba-155d3bb19320" (UID: "36597581-6c3f-42a7-98ba-155d3bb19320"). InnerVolumeSpecName "combined-ca-bundle".
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.598171 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.612563 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.612601 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.612635 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81a92903-9f60-4f44-917f-744a2b80a57c-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.612654 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.612705 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts podName:79476096-5d34-4e8a-9f33-3127bacf4e60 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:23.612656703 +0000 UTC m=+1494.264855962 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts") pod "barbican6523-account-delete-t2fnw" (UID: "79476096-5d34-4e8a-9f33-3127bacf4e60") : configmap "openstack-scripts" not found Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.613442 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.613611 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts podName:58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad nodeName:}" failed. No retries permitted until 2025-11-21 14:28:23.61358611 +0000 UTC m=+1494.265785429 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts") pod "novaapi242e-account-delete-tbszv" (UID: "58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad") : configmap "openstack-scripts" not found Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.691937 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-config-data" (OuterVolumeSpecName: "config-data") pod "1718aee5-94ce-4682-aa62-28843ff1e2ef" (UID: "1718aee5-94ce-4682-aa62-28843ff1e2ef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.721113 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.740253 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8057ad05-b8c9-4742-a0e2-388f0a901595" (UID: "8057ad05-b8c9-4742-a0e2-388f0a901595"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.747644 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" (UID: "4ae7ffea-af5d-4804-84cf-fa3c5edfbd27"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.763556 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "612a4642-7af7-4d93-a27f-e63a0593a511" (UID: "612a4642-7af7-4d93-a27f-e63a0593a511"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.764454 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "424dddc1-7019-40ab-b405-a2dcaee08c65" (UID: "424dddc1-7019-40ab-b405-a2dcaee08c65"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.778486 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29fd4802-19c7-4e11-b776-c505c03206b0" (UID: "29fd4802-19c7-4e11-b776-c505c03206b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.783327 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" (UID: "4ae7ffea-af5d-4804-84cf-fa3c5edfbd27"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.790447 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/531a66a5-f4c9-44f1-83a7-a3e4292fef52-config-data" (OuterVolumeSpecName: "config-data") pod "531a66a5-f4c9-44f1-83a7-a3e4292fef52" (UID: "531a66a5-f4c9-44f1-83a7-a3e4292fef52"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.796832 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-config-data" (OuterVolumeSpecName: "config-data") pod "8057ad05-b8c9-4742-a0e2-388f0a901595" (UID: "8057ad05-b8c9-4742-a0e2-388f0a901595"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.799705 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "612a4642-7af7-4d93-a27f-e63a0593a511" (UID: "612a4642-7af7-4d93-a27f-e63a0593a511"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.818413 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-config-data" (OuterVolumeSpecName: "config-data") pod "98c89c8e-6557-46b4-adf8-f954dfff68b3" (UID: "98c89c8e-6557-46b4-adf8-f954dfff68b3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.818799 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-config-data" (OuterVolumeSpecName: "config-data") pod "ad456e3b-04a1-48d6-8fbc-39e3faa00aa0" (UID: "ad456e3b-04a1-48d6-8fbc-39e3faa00aa0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.821352 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "75187d0f-77b0-45ee-a452-1850f0fe7851" (UID: "75187d0f-77b0-45ee-a452-1850f0fe7851"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.822209 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-combined-ca-bundle\") pod \"75187d0f-77b0-45ee-a452-1850f0fe7851\" (UID: \"75187d0f-77b0-45ee-a452-1850f0fe7851\") " Nov 21 14:28:22 crc kubenswrapper[4774]: W1121 14:28:22.823957 4774 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/75187d0f-77b0-45ee-a452-1850f0fe7851/volumes/kubernetes.io~secret/combined-ca-bundle Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.823981 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "75187d0f-77b0-45ee-a452-1850f0fe7851" (UID: "75187d0f-77b0-45ee-a452-1850f0fe7851"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.833744 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.833804 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.833835 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/531a66a5-f4c9-44f1-83a7-a3e4292fef52-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.833845 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.833856 4774 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/424dddc1-7019-40ab-b405-a2dcaee08c65-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.833868 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.833879 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.833891 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98c89c8e-6557-46b4-adf8-f954dfff68b3-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.833900 4774 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.833909 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.833919 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8057ad05-b8c9-4742-a0e2-388f0a901595-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.833929 4774 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.834014 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 21 14:28:22 crc kubenswrapper[4774]: E1121 14:28:22.834076 4774 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3311b6bd-a19b-402c-afe4-22222098c669-operator-scripts podName:3311b6bd-a19b-402c-afe4-22222098c669 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:23.834056178 +0000 UTC m=+1494.486255437 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/3311b6bd-a19b-402c-afe4-22222098c669-operator-scripts") pod "novacell03102-account-delete-pwxhc" (UID: "3311b6bd-a19b-402c-afe4-22222098c669") : configmap "openstack-scripts" not found
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.842239 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-config-data" (OuterVolumeSpecName: "config-data") pod "36597581-6c3f-42a7-98ba-155d3bb19320" (UID: "36597581-6c3f-42a7-98ba-155d3bb19320"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.890331 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-config-data" (OuterVolumeSpecName: "config-data") pod "204761da-3cd3-4024-8268-2c4ade77be70" (UID: "204761da-3cd3-4024-8268-2c4ade77be70"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.926415 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "36597581-6c3f-42a7-98ba-155d3bb19320" (UID: "36597581-6c3f-42a7-98ba-155d3bb19320"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.933443 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7bf981c0-8ff6-493c-a5fc-14610df3b362" (UID: "7bf981c0-8ff6-493c-a5fc-14610df3b362"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.935803 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.935845 4774 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.935858 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.935871 4774 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/36597581-6c3f-42a7-98ba-155d3bb19320-public-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.936113 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-config-data" (OuterVolumeSpecName: "config-data") pod "7bf981c0-8ff6-493c-a5fc-14610df3b362" (UID: "7bf981c0-8ff6-493c-a5fc-14610df3b362"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.941005 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1718aee5-94ce-4682-aa62-28843ff1e2ef" (UID: "1718aee5-94ce-4682-aa62-28843ff1e2ef"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.949721 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4dd9e6d7-d0b1-49f3-920a-34e434835bfa" (UID: "4dd9e6d7-d0b1-49f3-920a-34e434835bfa"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.974295 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7bf981c0-8ff6-493c-a5fc-14610df3b362" (UID: "7bf981c0-8ff6-493c-a5fc-14610df3b362"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.978192 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4dd9e6d7-d0b1-49f3-920a-34e434835bfa" (UID: "4dd9e6d7-d0b1-49f3-920a-34e434835bfa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:22 crc kubenswrapper[4774]: I1121 14:28:22.997210 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "612a4642-7af7-4d93-a27f-e63a0593a511" (UID: "612a4642-7af7-4d93-a27f-e63a0593a511"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.000049 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "204761da-3cd3-4024-8268-2c4ade77be70" (UID: "204761da-3cd3-4024-8268-2c4ade77be70"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.007466 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "204761da-3cd3-4024-8268-2c4ade77be70" (UID: "204761da-3cd3-4024-8268-2c4ade77be70"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.018136 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-config-data" (OuterVolumeSpecName: "config-data") pod "4dd9e6d7-d0b1-49f3-920a-34e434835bfa" (UID: "4dd9e6d7-d0b1-49f3-920a-34e434835bfa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.020896 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4dd9e6d7-d0b1-49f3-920a-34e434835bfa" (UID: "4dd9e6d7-d0b1-49f3-920a-34e434835bfa"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.034727 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1718aee5-94ce-4682-aa62-28843ff1e2ef" (UID: "1718aee5-94ce-4682-aa62-28843ff1e2ef"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.039425 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.039470 4774 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-public-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.039479 4774 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/612a4642-7af7-4d93-a27f-e63a0593a511-public-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.039490 4774 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-public-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.039501 4774 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-public-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.039513 4774 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.039521 4774 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1718aee5-94ce-4682-aa62-28843ff1e2ef-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.039534 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.039543 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bf981c0-8ff6-493c-a5fc-14610df3b362-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.039555 4774 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.039564 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4dd9e6d7-d0b1-49f3-920a-34e434835bfa-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.058855 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-config-data" (OuterVolumeSpecName: "config-data") pod "29fd4802-19c7-4e11-b776-c505c03206b0" (UID: "29fd4802-19c7-4e11-b776-c505c03206b0"). InnerVolumeSpecName "config-data".
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.061891 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "29fd4802-19c7-4e11-b776-c505c03206b0" (UID: "29fd4802-19c7-4e11-b776-c505c03206b0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.074665 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-config-data" (OuterVolumeSpecName: "config-data") pod "75187d0f-77b0-45ee-a452-1850f0fe7851" (UID: "75187d0f-77b0-45ee-a452-1850f0fe7851"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.096432 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "204761da-3cd3-4024-8268-2c4ade77be70" (UID: "204761da-3cd3-4024-8268-2c4ade77be70"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.141765 4774 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/204761da-3cd3-4024-8268-2c4ade77be70-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.141802 4774 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.141828 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29fd4802-19c7-4e11-b776-c505c03206b0-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.141838 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75187d0f-77b0-45ee-a452-1850f0fe7851-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.348281 4774 generic.go:334] "Generic (PLEG): container finished" podID="64e33a39-c371-477f-b1c9-d58189db4bc8" containerID="e113bb91e61fb20bd55da6f381dd07a86f741c04641af203c9cd800b9d16d231" exitCode=0 Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.360710 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_4c16af5b-77af-4097-ad41-42aaa0aac4a1/ovn-northd/0.log" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.360771 4774 generic.go:334] "Generic (PLEG): container finished" podID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerID="382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9" exitCode=139 Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.424706 4774 generic.go:334] "Generic (PLEG): container finished" podID="650c7a92-1469-4a9c-9a60-a846fe7ed823" containerID="bd469b90ad8e693c30dda153b3857ce28b94eb20f3a3677f460f839008c9f746" exitCode=1 Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.436440 4774 generic.go:334] "Generic (PLEG): container finished" 
podID="58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad" containerID="5727b499d39a732f7374e6f309fd71b0123554ed2209b841a589743c7d151fb7" exitCode=1 Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.437453 4774 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/neutronfb24-account-delete-z2nw8" secret="" err="secret \"galera-openstack-dockercfg-4stkb\" not found" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.437530 4774 scope.go:117] "RemoveContainer" containerID="bd469b90ad8e693c30dda153b3857ce28b94eb20f3a3677f460f839008c9f746" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.461996 4774 generic.go:334] "Generic (PLEG): container finished" podID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerID="7d2c6e460846a332f45e2cd1fd8b4211e1fef71fdaba2c330e61b8c2240fa3a3" exitCode=0 Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.469674 4774 generic.go:334] "Generic (PLEG): container finished" podID="3311b6bd-a19b-402c-afe4-22222098c669" containerID="51d0feed6ebdd647d9dc85625eb572f976beebc0fdd506524e354f34444e80c4" exitCode=1 Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.480924 4774 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack/novaapi242e-account-delete-tbszv" secret="" err="secret \"galera-openstack-dockercfg-4stkb\" not found" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.481002 4774 scope.go:117] "RemoveContainer" containerID="5727b499d39a732f7374e6f309fd71b0123554ed2209b841a589743c7d151fb7" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.497634 4774 generic.go:334] "Generic (PLEG): container finished" podID="79476096-5d34-4e8a-9f33-3127bacf4e60" containerID="78430813703b5574a2bac69e33958064713d4be7bef77c6035c099968ca709b0" exitCode=1 Nov 21 14:28:23 crc kubenswrapper[4774]: E1121 14:28:23.562109 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 21 14:28:23 crc kubenswrapper[4774]: E1121 14:28:23.562175 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/650c7a92-1469-4a9c-9a60-a846fe7ed823-operator-scripts podName:650c7a92-1469-4a9c-9a60-a846fe7ed823 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:24.062155376 +0000 UTC m=+1494.714354635 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/650c7a92-1469-4a9c-9a60-a846fe7ed823-operator-scripts") pod "neutronfb24-account-delete-z2nw8" (UID: "650c7a92-1469-4a9c-9a60-a846fe7ed823") : configmap "openstack-scripts" not found Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.573731 4774 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack/barbican6523-account-delete-t2fnw" secret="" err="secret \"galera-openstack-dockercfg-4stkb\" not found" Nov 21 14:28:23 crc kubenswrapper[4774]: I1121 14:28:23.573800 4774 scope.go:117] "RemoveContainer" containerID="78430813703b5574a2bac69e33958064713d4be7bef77c6035c099968ca709b0" Nov 21 14:28:23 crc kubenswrapper[4774]: E1121 14:28:23.663123 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 21 14:28:23 crc kubenswrapper[4774]: E1121 14:28:23.663214 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts podName:58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad nodeName:}" failed. No retries permitted until 2025-11-21 14:28:25.663193272 +0000 UTC m=+1496.315392531 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts") pod "novaapi242e-account-delete-tbszv" (UID: "58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad") : configmap "openstack-scripts" not found Nov 21 14:28:23 crc kubenswrapper[4774]: E1121 14:28:23.663597 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 21 14:28:23 crc kubenswrapper[4774]: E1121 14:28:23.663706 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts podName:79476096-5d34-4e8a-9f33-3127bacf4e60 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:25.663684246 +0000 UTC m=+1496.315883505 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts") pod "barbican6523-account-delete-t2fnw" (UID: "79476096-5d34-4e8a-9f33-3127bacf4e60") : configmap "openstack-scripts" not found Nov 21 14:28:23 crc kubenswrapper[4774]: E1121 14:28:23.869375 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 21 14:28:23 crc kubenswrapper[4774]: E1121 14:28:23.869830 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3311b6bd-a19b-402c-afe4-22222098c669-operator-scripts podName:3311b6bd-a19b-402c-afe4-22222098c669 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:25.869797376 +0000 UTC m=+1496.521996635 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/3311b6bd-a19b-402c-afe4-22222098c669-operator-scripts") pod "novacell03102-account-delete-pwxhc" (UID: "3311b6bd-a19b-402c-afe4-22222098c669") : configmap "openstack-scripts" not found Nov 21 14:28:24 crc kubenswrapper[4774]: E1121 14:28:24.073230 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 21 14:28:24 crc kubenswrapper[4774]: E1121 14:28:24.073328 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/650c7a92-1469-4a9c-9a60-a846fe7ed823-operator-scripts podName:650c7a92-1469-4a9c-9a60-a846fe7ed823 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:25.073306519 +0000 UTC m=+1495.725505778 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/650c7a92-1469-4a9c-9a60-a846fe7ed823-operator-scripts") pod "neutronfb24-account-delete-z2nw8" (UID: "650c7a92-1469-4a9c-9a60-a846fe7ed823") : configmap "openstack-scripts" not found Nov 21 14:28:24 crc kubenswrapper[4774]: E1121 14:28:24.112494 4774 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="1.95s" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.129139 4774 scope.go:117] "RemoveContainer" containerID="571c60de19b673d0a2cf6499c4d0ad765e15a4171ae78a3dcb8552bb3605e8e6" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.226248 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell03102-account-delete-pwxhc" event={"ID":"3311b6bd-a19b-402c-afe4-22222098c669","Type":"ContainerStarted","Data":"51d0feed6ebdd647d9dc85625eb572f976beebc0fdd506524e354f34444e80c4"} Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227038 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"81a92903-9f60-4f44-917f-744a2b80a57c","Type":"ContainerDied","Data":"1367553304acf585ee869af4d57fce8ba6a46d4fa49ae9f6d2c93d22109736d3"} Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227093 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-866df86b64-5t8kn" event={"ID":"8057ad05-b8c9-4742-a0e2-388f0a901595","Type":"ContainerDied","Data":"733fb25f34b24ef96aca89233cb4902b2288ab842e8dbc88d77d6eef124dc1a8"} Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227115 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"424dddc1-7019-40ab-b405-a2dcaee08c65","Type":"ContainerDied","Data":"eca47116e2e11cf007e39b5eb2ad4e3e760120b62040ea790aedf25cadf061e1"} Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227130 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"64e33a39-c371-477f-b1c9-d58189db4bc8","Type":"ContainerDied","Data":"e113bb91e61fb20bd55da6f381dd07a86f741c04641af203c9cd800b9d16d231"} Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227143 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"64e33a39-c371-477f-b1c9-d58189db4bc8","Type":"ContainerDied","Data":"dc6d1662a0c2b78373b991c1b1e92ae633ed34194c4c88028696266cdba3ae00"} Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227154 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc6d1662a0c2b78373b991c1b1e92ae633ed34194c4c88028696266cdba3ae00" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227175 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-j6zr8"] Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227193 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-j6zr8"] Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227207 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-6523-account-create-4fm5p"] Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227220 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4c16af5b-77af-4097-ad41-42aaa0aac4a1","Type":"ContainerDied","Data":"382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9"} Nov 21 14:28:24 crc 
kubenswrapper[4774]: I1121 14:28:24.227236 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-6523-account-create-4fm5p"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227247 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"4c16af5b-77af-4097-ad41-42aaa0aac4a1","Type":"ContainerDied","Data":"fbbc56959cbb2a4349d71256067a2120bf33ba3f0f2153382d4014df261fd440"}
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227256 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fbbc56959cbb2a4349d71256067a2120bf33ba3f0f2153382d4014df261fd440"
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227266 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican6523-account-delete-t2fnw"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227279 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-29l4m"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227292 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-29l4m"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227303 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-5822-account-create-7wb8z"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227314 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronfb24-account-delete-z2nw8" event={"ID":"650c7a92-1469-4a9c-9a60-a846fe7ed823","Type":"ContainerDied","Data":"bd469b90ad8e693c30dda153b3857ce28b94eb20f3a3677f460f839008c9f746"}
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227328 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi242e-account-delete-tbszv" event={"ID":"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad","Type":"ContainerDied","Data":"5727b499d39a732f7374e6f309fd71b0123554ed2209b841a589743c7d151fb7"}
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227344 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-5822-account-create-7wb8z"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227357 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder5822-account-delete-7mbkj"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227371 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder5822-account-delete-7mbkj"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227383 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc1f1975-32c8-494c-b6c7-69a72353879f","Type":"ContainerDied","Data":"7d2c6e460846a332f45e2cd1fd8b4211e1fef71fdaba2c330e61b8c2240fa3a3"}
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227398 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-9pp78"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227410 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e","Type":"ContainerDied","Data":"b91420377d8a19f9ecfad89bbe133f3c6f735de99c7abdd0a922e0fece1382a7"}
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227422 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b91420377d8a19f9ecfad89bbe133f3c6f735de99c7abdd0a922e0fece1382a7"
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227430 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-9pp78"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227442 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement2802-account-delete-ltg2g"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227452 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell03102-account-delete-pwxhc" event={"ID":"3311b6bd-a19b-402c-afe4-22222098c669","Type":"ContainerDied","Data":"51d0feed6ebdd647d9dc85625eb572f976beebc0fdd506524e354f34444e80c4"}
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227465 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-2802-account-create-tkf4w"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227477 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6523-account-delete-t2fnw" event={"ID":"79476096-5d34-4e8a-9f33-3127bacf4e60","Type":"ContainerDied","Data":"78430813703b5574a2bac69e33958064713d4be7bef77c6035c099968ca709b0"}
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227488 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-2802-account-create-tkf4w"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227500 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement2802-account-delete-ltg2g"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227510 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-t4rqm"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227519 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-t4rqm"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227529 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance128f-account-delete-w4g2r"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227538 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance128f-account-delete-w4g2r"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227550 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-128f-account-create-gpf7s"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227559 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-128f-account-create-gpf7s"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227575 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-mhzgm"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227585 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-mhzgm"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227599 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronfb24-account-delete-z2nw8"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227609 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-fb24-account-create-tlmv2"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227620 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-fb24-account-create-tlmv2"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227629 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-d6wf2"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227637 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-d6wf2"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227647 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi242e-account-delete-tbszv"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227655 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-242e-account-create-84s8b"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227663 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-242e-account-create-84s8b"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227673 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-7vg67"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227684 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-7vg67"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227694 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-3102-account-create-g4bl5"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227704 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell03102-account-delete-pwxhc"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.227714 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-3102-account-create-g4bl5"]
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.532335 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"dc1f1975-32c8-494c-b6c7-69a72353879f","Type":"ContainerDied","Data":"0c88d9b23222893490a25b9deae558b453daa1d5fa8eba5b37e1ba8fc2b53dd8"}
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.532393 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c88d9b23222893490a25b9deae558b453daa1d5fa8eba5b37e1ba8fc2b53dd8"
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.535091 4774 generic.go:334] "Generic (PLEG): container finished" podID="57cdbc4f-20e9-4189-872d-f6f3c58f7093" containerID="aea70590e231f1b48851f4ffa1e6852272819cf991813022fe7bff259b0f4d04" exitCode=0
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.535118 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b76744b8b-5ws6g" event={"ID":"57cdbc4f-20e9-4189-872d-f6f3c58f7093","Type":"ContainerDied","Data":"aea70590e231f1b48851f4ffa1e6852272819cf991813022fe7bff259b0f4d04"}
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.535133 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b76744b8b-5ws6g" event={"ID":"57cdbc4f-20e9-4189-872d-f6f3c58f7093","Type":"ContainerDied","Data":"599f526dc1ff79383e207e6cde0b3433691bbb4e2791ef2e3034c9cdbdf138ff"}
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.535143 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="599f526dc1ff79383e207e6cde0b3433691bbb4e2791ef2e3034c9cdbdf138ff"
Nov 21 14:28:24 crc kubenswrapper[4774]: E1121 14:28:24.730543 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9 is running failed: container process not found" containerID="382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Nov 21 14:28:24 crc kubenswrapper[4774]: E1121 14:28:24.731352 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running:
checking if PID of 382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9 is running failed: container process not found" containerID="382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Nov 21 14:28:24 crc kubenswrapper[4774]: E1121 14:28:24.731742 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9 is running failed: container process not found" containerID="382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9" cmd=["/usr/local/bin/container-scripts/status_check.sh"]
Nov 21 14:28:24 crc kubenswrapper[4774]: E1121 14:28:24.731778 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-northd-0" podUID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerName="ovn-northd"
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.751423 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.770469 4774 scope.go:117] "RemoveContainer" containerID="08dcf92110aca28bb33e09d2cf80555b027cc58cf28e0ba6099d79517b3e3e96"
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.792520 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.794046 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-config-data\") pod \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.794097 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-kolla-config\") pod \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.794159 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-memcached-tls-certs\") pod \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.794395 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-combined-ca-bundle\") pod \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.794438 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztnvx\" (UniqueName: \"kubernetes.io/projected/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-kube-api-access-ztnvx\") pod \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\" (UID: \"f38fad89-cd6f-47d4-82f9-a761f6a9ed9e\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.796599 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" (UID: "f38fad89-cd6f-47d4-82f9-a761f6a9ed9e"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.797137 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-config-data" (OuterVolumeSpecName: "config-data") pod "f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" (UID: "f38fad89-cd6f-47d4-82f9-a761f6a9ed9e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.818294 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-kube-api-access-ztnvx" (OuterVolumeSpecName: "kube-api-access-ztnvx") pod "f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" (UID: "f38fad89-cd6f-47d4-82f9-a761f6a9ed9e"). InnerVolumeSpecName "kube-api-access-ztnvx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.819780 4774 scope.go:117] "RemoveContainer" containerID="66249ecbe2c3348c6acd48e9804c896c943d2119544945a4641b3cd22603525d"
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.877766 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" (UID: "f38fad89-cd6f-47d4-82f9-a761f6a9ed9e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.896237 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-server-conf\") pod \"64e33a39-c371-477f-b1c9-d58189db4bc8\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.896572 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/64e33a39-c371-477f-b1c9-d58189db4bc8-erlang-cookie-secret\") pod \"64e33a39-c371-477f-b1c9-d58189db4bc8\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.896658 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-tls\") pod \"64e33a39-c371-477f-b1c9-d58189db4bc8\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.896794 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-confd\") pod \"64e33a39-c371-477f-b1c9-d58189db4bc8\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.896955 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-plugins-conf\") pod \"64e33a39-c371-477f-b1c9-d58189db4bc8\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.897050 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxzhf\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-kube-api-access-hxzhf\") pod \"64e33a39-c371-477f-b1c9-d58189db4bc8\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.897129 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-erlang-cookie\") pod \"64e33a39-c371-477f-b1c9-d58189db4bc8\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.897193 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"64e33a39-c371-477f-b1c9-d58189db4bc8\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.897267 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/64e33a39-c371-477f-b1c9-d58189db4bc8-pod-info\") pod \"64e33a39-c371-477f-b1c9-d58189db4bc8\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") "
Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.897345 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-plugins\") pod \"64e33a39-c371-477f-b1c9-d58189db4bc8\" (UID:
\"64e33a39-c371-477f-b1c9-d58189db4bc8\") " Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.897451 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data\") pod \"64e33a39-c371-477f-b1c9-d58189db4bc8\" (UID: \"64e33a39-c371-477f-b1c9-d58189db4bc8\") " Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.897893 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.897967 4774 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.898027 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.898080 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztnvx\" (UniqueName: \"kubernetes.io/projected/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-kube-api-access-ztnvx\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.899475 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "64e33a39-c371-477f-b1c9-d58189db4bc8" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.899526 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "64e33a39-c371-477f-b1c9-d58189db4bc8" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.899856 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "64e33a39-c371-477f-b1c9-d58189db4bc8" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.903786 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e33a39-c371-477f-b1c9-d58189db4bc8-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "64e33a39-c371-477f-b1c9-d58189db4bc8" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.904480 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-kube-api-access-hxzhf" (OuterVolumeSpecName: "kube-api-access-hxzhf") pod "64e33a39-c371-477f-b1c9-d58189db4bc8" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8"). InnerVolumeSpecName "kube-api-access-hxzhf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.906368 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/64e33a39-c371-477f-b1c9-d58189db4bc8-pod-info" (OuterVolumeSpecName: "pod-info") pod "64e33a39-c371-477f-b1c9-d58189db4bc8" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.910931 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "64e33a39-c371-477f-b1c9-d58189db4bc8" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.912140 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "persistence") pod "64e33a39-c371-477f-b1c9-d58189db4bc8" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.932154 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" (UID: "f38fad89-cd6f-47d4-82f9-a761f6a9ed9e"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.986161 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-server-conf" (OuterVolumeSpecName: "server-conf") pod "64e33a39-c371-477f-b1c9-d58189db4bc8" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.989363 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data" (OuterVolumeSpecName: "config-data") pod "64e33a39-c371-477f-b1c9-d58189db4bc8" (UID: "64e33a39-c371-477f-b1c9-d58189db4bc8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.999623 4774 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.999655 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.999670 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxzhf\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-kube-api-access-hxzhf\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.999693 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.999704 4774 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/64e33a39-c371-477f-b1c9-d58189db4bc8-pod-info\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.999714 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.999724 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.999733 4774 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/64e33a39-c371-477f-b1c9-d58189db4bc8-server-conf\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.999744 4774 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/64e33a39-c371-477f-b1c9-d58189db4bc8-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.999753 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:24 crc kubenswrapper[4774]: I1121 14:28:24.999762 4774 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.018225 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.042445 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "64e33a39-c371-477f-b1c9-d58189db4bc8" (UID: 
"64e33a39-c371-477f-b1c9-d58189db4bc8"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.101185 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/64e33a39-c371-477f-b1c9-d58189db4bc8-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.101219 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: E1121 14:28:25.101329 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 21 14:28:25 crc kubenswrapper[4774]: E1121 14:28:25.101388 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/650c7a92-1469-4a9c-9a60-a846fe7ed823-operator-scripts podName:650c7a92-1469-4a9c-9a60-a846fe7ed823 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:27.101371263 +0000 UTC m=+1497.753570532 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/650c7a92-1469-4a9c-9a60-a846fe7ed823-operator-scripts") pod "neutronfb24-account-delete-z2nw8" (UID: "650c7a92-1469-4a9c-9a60-a846fe7ed823") : configmap "openstack-scripts" not found Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.353542 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_4c16af5b-77af-4097-ad41-42aaa0aac4a1/ovn-northd/0.log" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.353658 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.370636 4774 scope.go:117] "RemoveContainer" containerID="464ba226111b0f9cd638d2e2cf2340bbb1479d0450894949cd4f99913bbc9678" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.373085 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406005 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-metrics-certs-tls-certs\") pod \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406081 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-sg-core-conf-yaml\") pod \"dc1f1975-32c8-494c-b6c7-69a72353879f\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406116 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fn6hg\" (UniqueName: \"kubernetes.io/projected/dc1f1975-32c8-494c-b6c7-69a72353879f-kube-api-access-fn6hg\") pod \"dc1f1975-32c8-494c-b6c7-69a72353879f\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406169 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-ovn-northd-tls-certs\") pod \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406215 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4c16af5b-77af-4097-ad41-42aaa0aac4a1-ovn-rundir\") pod \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406257 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-ceilometer-tls-certs\") pod \"dc1f1975-32c8-494c-b6c7-69a72353879f\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406297 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-combined-ca-bundle\") pod \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406337 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-scripts\") pod \"dc1f1975-32c8-494c-b6c7-69a72353879f\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406365 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc1f1975-32c8-494c-b6c7-69a72353879f-run-httpd\") pod \"dc1f1975-32c8-494c-b6c7-69a72353879f\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406447 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-config-data\") pod \"dc1f1975-32c8-494c-b6c7-69a72353879f\" 
(UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406518 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc1f1975-32c8-494c-b6c7-69a72353879f-log-httpd\") pod \"dc1f1975-32c8-494c-b6c7-69a72353879f\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406553 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c16af5b-77af-4097-ad41-42aaa0aac4a1-config\") pod \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406602 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c16af5b-77af-4097-ad41-42aaa0aac4a1-scripts\") pod \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406635 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-combined-ca-bundle\") pod \"dc1f1975-32c8-494c-b6c7-69a72353879f\" (UID: \"dc1f1975-32c8-494c-b6c7-69a72353879f\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.406672 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8wwb2\" (UniqueName: \"kubernetes.io/projected/4c16af5b-77af-4097-ad41-42aaa0aac4a1-kube-api-access-8wwb2\") pod \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\" (UID: \"4c16af5b-77af-4097-ad41-42aaa0aac4a1\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.409997 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c16af5b-77af-4097-ad41-42aaa0aac4a1-config" (OuterVolumeSpecName: "config") pod "4c16af5b-77af-4097-ad41-42aaa0aac4a1" (UID: "4c16af5b-77af-4097-ad41-42aaa0aac4a1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.410915 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc1f1975-32c8-494c-b6c7-69a72353879f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "dc1f1975-32c8-494c-b6c7-69a72353879f" (UID: "dc1f1975-32c8-494c-b6c7-69a72353879f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.419028 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c16af5b-77af-4097-ad41-42aaa0aac4a1-scripts" (OuterVolumeSpecName: "scripts") pod "4c16af5b-77af-4097-ad41-42aaa0aac4a1" (UID: "4c16af5b-77af-4097-ad41-42aaa0aac4a1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.425516 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c16af5b-77af-4097-ad41-42aaa0aac4a1-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "4c16af5b-77af-4097-ad41-42aaa0aac4a1" (UID: "4c16af5b-77af-4097-ad41-42aaa0aac4a1"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.425618 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc1f1975-32c8-494c-b6c7-69a72353879f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "dc1f1975-32c8-494c-b6c7-69a72353879f" (UID: "dc1f1975-32c8-494c-b6c7-69a72353879f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.427537 4774 scope.go:117] "RemoveContainer" containerID="72732cc3c72816545f8f6bd38e3894a25c914501104260c3cdc2219287bc3e97" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.448960 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-576b48cd9b-wr2q7" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.161:9311/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.448982 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-576b48cd9b-wr2q7" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.161:9311/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.481004 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.483566 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell03102-account-delete-pwxhc" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.495088 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-scripts" (OuterVolumeSpecName: "scripts") pod "dc1f1975-32c8-494c-b6c7-69a72353879f" (UID: "dc1f1975-32c8-494c-b6c7-69a72353879f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.514216 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntwsc\" (UniqueName: \"kubernetes.io/projected/57cdbc4f-20e9-4189-872d-f6f3c58f7093-kube-api-access-ntwsc\") pod \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.514580 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-config-data\") pod \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.514717 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-credential-keys\") pod \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.514799 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-scripts\") pod \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.514916 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-internal-tls-certs\") pod \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.514986 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3311b6bd-a19b-402c-afe4-22222098c669-operator-scripts\") pod \"3311b6bd-a19b-402c-afe4-22222098c669\" (UID: \"3311b6bd-a19b-402c-afe4-22222098c669\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.515072 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-public-tls-certs\") pod \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.515159 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jf2lt\" (UniqueName: \"kubernetes.io/projected/3311b6bd-a19b-402c-afe4-22222098c669-kube-api-access-jf2lt\") pod \"3311b6bd-a19b-402c-afe4-22222098c669\" (UID: \"3311b6bd-a19b-402c-afe4-22222098c669\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.515269 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-combined-ca-bundle\") pod \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\" (UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.515366 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-fernet-keys\") pod \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\" 
(UID: \"57cdbc4f-20e9-4189-872d-f6f3c58f7093\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.515752 4774 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4c16af5b-77af-4097-ad41-42aaa0aac4a1-ovn-rundir\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.515828 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.515900 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc1f1975-32c8-494c-b6c7-69a72353879f-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.515954 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc1f1975-32c8-494c-b6c7-69a72353879f-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.516004 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c16af5b-77af-4097-ad41-42aaa0aac4a1-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.516057 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4c16af5b-77af-4097-ad41-42aaa0aac4a1-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.521219 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc1f1975-32c8-494c-b6c7-69a72353879f-kube-api-access-fn6hg" (OuterVolumeSpecName: "kube-api-access-fn6hg") pod "dc1f1975-32c8-494c-b6c7-69a72353879f" (UID: "dc1f1975-32c8-494c-b6c7-69a72353879f"). InnerVolumeSpecName "kube-api-access-fn6hg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.524233 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3311b6bd-a19b-402c-afe4-22222098c669-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3311b6bd-a19b-402c-afe4-22222098c669" (UID: "3311b6bd-a19b-402c-afe4-22222098c669"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.539980 4774 scope.go:117] "RemoveContainer" containerID="1bed157d3f1b09ec22281912c29b9fe8e5b372b41ebbf607b1b08a4791141c7e" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.543858 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.561036 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-scripts" (OuterVolumeSpecName: "scripts") pod "57cdbc4f-20e9-4189-872d-f6f3c58f7093" (UID: "57cdbc4f-20e9-4189-872d-f6f3c58f7093"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.563583 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "57cdbc4f-20e9-4189-872d-f6f3c58f7093" (UID: "57cdbc4f-20e9-4189-872d-f6f3c58f7093"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.567198 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3311b6bd-a19b-402c-afe4-22222098c669-kube-api-access-jf2lt" (OuterVolumeSpecName: "kube-api-access-jf2lt") pod "3311b6bd-a19b-402c-afe4-22222098c669" (UID: "3311b6bd-a19b-402c-afe4-22222098c669"). InnerVolumeSpecName "kube-api-access-jf2lt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.584202 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "57cdbc4f-20e9-4189-872d-f6f3c58f7093" (UID: "57cdbc4f-20e9-4189-872d-f6f3c58f7093"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.616927 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57cdbc4f-20e9-4189-872d-f6f3c58f7093-kube-api-access-ntwsc" (OuterVolumeSpecName: "kube-api-access-ntwsc") pod "57cdbc4f-20e9-4189-872d-f6f3c58f7093" (UID: "57cdbc4f-20e9-4189-872d-f6f3c58f7093"). InnerVolumeSpecName "kube-api-access-ntwsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.617064 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c16af5b-77af-4097-ad41-42aaa0aac4a1" (UID: "4c16af5b-77af-4097-ad41-42aaa0aac4a1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.619671 4774 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.619694 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntwsc\" (UniqueName: \"kubernetes.io/projected/57cdbc4f-20e9-4189-872d-f6f3c58f7093-kube-api-access-ntwsc\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.619705 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fn6hg\" (UniqueName: \"kubernetes.io/projected/dc1f1975-32c8-494c-b6c7-69a72353879f-kube-api-access-fn6hg\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.619717 4774 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.619727 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.619737 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.619746 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3311b6bd-a19b-402c-afe4-22222098c669-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.619755 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jf2lt\" (UniqueName: \"kubernetes.io/projected/3311b6bd-a19b-402c-afe4-22222098c669-kube-api-access-jf2lt\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.640055 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c16af5b-77af-4097-ad41-42aaa0aac4a1-kube-api-access-8wwb2" (OuterVolumeSpecName: "kube-api-access-8wwb2") pod "4c16af5b-77af-4097-ad41-42aaa0aac4a1" (UID: "4c16af5b-77af-4097-ad41-42aaa0aac4a1"). InnerVolumeSpecName "kube-api-access-8wwb2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.657070 4774 scope.go:117] "RemoveContainer" containerID="f7e24f67518e454751426c3c5dc72df1fbe276fbaaac5b326b29c0ee877432a8" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.663390 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.693417 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cinder-api-0" podUID="7bf981c0-8ff6-493c-a5fc-14610df3b362" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.167:8776/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.694005 4774 generic.go:334] "Generic (PLEG): container finished" podID="58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad" containerID="49d17f16cc51398f705c8fb2c63a51d77570b36bf271a56c53df10dddd9ceca5" exitCode=1 Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.694111 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi242e-account-delete-tbszv" event={"ID":"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad","Type":"ContainerDied","Data":"49d17f16cc51398f705c8fb2c63a51d77570b36bf271a56c53df10dddd9ceca5"} Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.703083 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-config-data" (OuterVolumeSpecName: "config-data") pod "57cdbc4f-20e9-4189-872d-f6f3c58f7093" (UID: "57cdbc4f-20e9-4189-872d-f6f3c58f7093"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.737290 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.737957 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"7030e5d8-2d2b-4cc5-a283-339599595a18\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.738118 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-config-data-default\") pod \"7030e5d8-2d2b-4cc5-a283-339599595a18\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.738138 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/7030e5d8-2d2b-4cc5-a283-339599595a18-galera-tls-certs\") pod \"7030e5d8-2d2b-4cc5-a283-339599595a18\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.738225 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-operator-scripts\") pod \"7030e5d8-2d2b-4cc5-a283-339599595a18\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.738304 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/7030e5d8-2d2b-4cc5-a283-339599595a18-config-data-generated\") pod \"7030e5d8-2d2b-4cc5-a283-339599595a18\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.738344 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-kolla-config\") pod \"7030e5d8-2d2b-4cc5-a283-339599595a18\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.738368 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7030e5d8-2d2b-4cc5-a283-339599595a18-combined-ca-bundle\") pod \"7030e5d8-2d2b-4cc5-a283-339599595a18\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.738406 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2gl9\" (UniqueName: \"kubernetes.io/projected/7030e5d8-2d2b-4cc5-a283-339599595a18-kube-api-access-k2gl9\") pod \"7030e5d8-2d2b-4cc5-a283-339599595a18\" (UID: \"7030e5d8-2d2b-4cc5-a283-339599595a18\") " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.738947 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8wwb2\" (UniqueName: \"kubernetes.io/projected/4c16af5b-77af-4097-ad41-42aaa0aac4a1-kube-api-access-8wwb2\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.738964 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: E1121 14:28:25.739034 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 21 14:28:25 crc kubenswrapper[4774]: E1121 14:28:25.739100 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts podName:79476096-5d34-4e8a-9f33-3127bacf4e60 nodeName:}" failed. No retries permitted until 2025-11-21 14:28:29.739077666 +0000 UTC m=+1500.391276925 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts") pod "barbican6523-account-delete-t2fnw" (UID: "79476096-5d34-4e8a-9f33-3127bacf4e60") : configmap "openstack-scripts" not found Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.739363 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "7030e5d8-2d2b-4cc5-a283-339599595a18" (UID: "7030e5d8-2d2b-4cc5-a283-339599595a18"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.740383 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7030e5d8-2d2b-4cc5-a283-339599595a18" (UID: "7030e5d8-2d2b-4cc5-a283-339599595a18"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.741049 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7030e5d8-2d2b-4cc5-a283-339599595a18-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "7030e5d8-2d2b-4cc5-a283-339599595a18" (UID: "7030e5d8-2d2b-4cc5-a283-339599595a18"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.741384 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "7030e5d8-2d2b-4cc5-a283-339599595a18" (UID: "7030e5d8-2d2b-4cc5-a283-339599595a18"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: E1121 14:28:25.741506 4774 configmap.go:193] Couldn't get configMap openstack/openstack-scripts: configmap "openstack-scripts" not found Nov 21 14:28:25 crc kubenswrapper[4774]: E1121 14:28:25.741578 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts podName:58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad nodeName:}" failed. No retries permitted until 2025-11-21 14:28:29.741549738 +0000 UTC m=+1500.393748987 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts") pod "novaapi242e-account-delete-tbszv" (UID: "58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad") : configmap "openstack-scripts" not found Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.748693 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.757947 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.769015 4774 scope.go:117] "RemoveContainer" containerID="5727b499d39a732f7374e6f309fd71b0123554ed2209b841a589743c7d151fb7" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.770044 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.776718 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.779195 4774 generic.go:334] "Generic (PLEG): container finished" podID="7030e5d8-2d2b-4cc5-a283-339599595a18" containerID="a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06" exitCode=0 Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.779316 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7030e5d8-2d2b-4cc5-a283-339599595a18","Type":"ContainerDied","Data":"a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06"} Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.779410 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7030e5d8-2d2b-4cc5-a283-339599595a18","Type":"ContainerDied","Data":"3ff256fc9339b1b56af433c2e3b1578584343f46f5866e2f99bc3016dfa20d81"} Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.779562 4774 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.783651 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.789495 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.820761 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novacell03102-account-delete-pwxhc" event={"ID":"3311b6bd-a19b-402c-afe4-22222098c669","Type":"ContainerDied","Data":"8557e9b8b109d70798eeacd9ade5c0706639c8124bf7a205542645d43c181df1"} Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.820980 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novacell03102-account-delete-pwxhc" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.825857 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7030e5d8-2d2b-4cc5-a283-339599595a18-kube-api-access-k2gl9" (OuterVolumeSpecName: "kube-api-access-k2gl9") pod "7030e5d8-2d2b-4cc5-a283-339599595a18" (UID: "7030e5d8-2d2b-4cc5-a283-339599595a18"). InnerVolumeSpecName "kube-api-access-k2gl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.826732 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "dc1f1975-32c8-494c-b6c7-69a72353879f" (UID: "dc1f1975-32c8-494c-b6c7-69a72353879f"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.843113 4774 generic.go:334] "Generic (PLEG): container finished" podID="79476096-5d34-4e8a-9f33-3127bacf4e60" containerID="3b0b0a6a69ecdbec432bc57521dfde480ebde0b424e3bfb51f0ad2b0bfd138f8" exitCode=1 Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.843207 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6523-account-delete-t2fnw" event={"ID":"79476096-5d34-4e8a-9f33-3127bacf4e60","Type":"ContainerDied","Data":"3b0b0a6a69ecdbec432bc57521dfde480ebde0b424e3bfb51f0ad2b0bfd138f8"} Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.846173 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.846227 4774 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.846243 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7030e5d8-2d2b-4cc5-a283-339599595a18-config-data-generated\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.846263 4774 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.846274 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2gl9\" (UniqueName: \"kubernetes.io/projected/7030e5d8-2d2b-4cc5-a283-339599595a18-kube-api-access-k2gl9\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.846284 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7030e5d8-2d2b-4cc5-a283-339599595a18-config-data-default\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.850152 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.850425 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.868042 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "7030e5d8-2d2b-4cc5-a283-339599595a18" (UID: "7030e5d8-2d2b-4cc5-a283-339599595a18"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.868098 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57cdbc4f-20e9-4189-872d-f6f3c58f7093" (UID: "57cdbc4f-20e9-4189-872d-f6f3c58f7093"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.868164 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-576b48cd9b-wr2q7"] Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.872231 4774 generic.go:334] "Generic (PLEG): container finished" podID="650c7a92-1469-4a9c-9a60-a846fe7ed823" containerID="d6a7d7a7c3bed4f014ad9ebd5dbb104ef9c38e3678d10bef857d3fe25fca8223" exitCode=1 Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.872448 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronfb24-account-delete-z2nw8" event={"ID":"650c7a92-1469-4a9c-9a60-a846fe7ed823","Type":"ContainerDied","Data":"d6a7d7a7c3bed4f014ad9ebd5dbb104ef9c38e3678d10bef857d3fe25fca8223"} Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.891683 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7030e5d8-2d2b-4cc5-a283-339599595a18-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7030e5d8-2d2b-4cc5-a283-339599595a18" (UID: "7030e5d8-2d2b-4cc5-a283-339599595a18"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.895719 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-576b48cd9b-wr2q7"] Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.900024 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "4c16af5b-77af-4097-ad41-42aaa0aac4a1" (UID: "4c16af5b-77af-4097-ad41-42aaa0aac4a1"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.900631 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.900643 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b76744b8b-5ws6g" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.900664 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.900639 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.902318 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.944513 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "57cdbc4f-20e9-4189-872d-f6f3c58f7093" (UID: "57cdbc4f-20e9-4189-872d-f6f3c58f7093"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.946976 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-config-data" (OuterVolumeSpecName: "config-data") pod "dc1f1975-32c8-494c-b6c7-69a72353879f" (UID: "dc1f1975-32c8-494c-b6c7-69a72353879f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.948557 4774 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.948580 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7030e5d8-2d2b-4cc5-a283-339599595a18-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.948595 4774 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.948618 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.948629 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.948642 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.953787 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc1f1975-32c8-494c-b6c7-69a72353879f" (UID: "dc1f1975-32c8-494c-b6c7-69a72353879f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.991318 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-8d9694746-ctlgk"] Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.994376 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "dc1f1975-32c8-494c-b6c7-69a72353879f" (UID: "dc1f1975-32c8-494c-b6c7-69a72353879f"). 
InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:25 crc kubenswrapper[4774]: I1121 14:28:25.999294 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "57cdbc4f-20e9-4189-872d-f6f3c58f7093" (UID: "57cdbc4f-20e9-4189-872d-f6f3c58f7093"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.002011 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7030e5d8-2d2b-4cc5-a283-339599595a18-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "7030e5d8-2d2b-4cc5-a283-339599595a18" (UID: "7030e5d8-2d2b-4cc5-a283-339599595a18"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.002142 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-8d9694746-ctlgk"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.006470 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.009215 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "4c16af5b-77af-4097-ad41-42aaa0aac4a1" (UID: "4c16af5b-77af-4097-ad41-42aaa0aac4a1"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.014135 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.020226 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-866df86b64-5t8kn"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.022691 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-866df86b64-5t8kn"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.027334 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.028802 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.032255 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.037130 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-proxy-748c4cc85c-dkrhb"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.042162 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-proxy-748c4cc85c-dkrhb"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.048066 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.050498 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" 
Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.050612 4774 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/57cdbc4f-20e9-4189-872d-f6f3c58f7093-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.050711 4774 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/7030e5d8-2d2b-4cc5-a283-339599595a18-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.050792 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.050879 4774 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c16af5b-77af-4097-ad41-42aaa0aac4a1-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.050963 4774 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc1f1975-32c8-494c-b6c7-69a72353879f-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.052791 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.080070 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.096903 4774 scope.go:117] "RemoveContainer" containerID="a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.115979 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11c290e3-78eb-4deb-82ac-8b3e93ef5c66" path="/var/lib/kubelet/pods/11c290e3-78eb-4deb-82ac-8b3e93ef5c66/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.116567 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" path="/var/lib/kubelet/pods/1718aee5-94ce-4682-aa62-28843ff1e2ef/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.117231 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="204761da-3cd3-4024-8268-2c4ade77be70" path="/var/lib/kubelet/pods/204761da-3cd3-4024-8268-2c4ade77be70/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.118494 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2969283f-bdf3-4a7c-88c3-04e0b009a6b9" path="/var/lib/kubelet/pods/2969283f-bdf3-4a7c-88c3-04e0b009a6b9/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.119279 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29fd4802-19c7-4e11-b776-c505c03206b0" path="/var/lib/kubelet/pods/29fd4802-19c7-4e11-b776-c505c03206b0/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.120338 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a52bd28-14b8-4988-a291-6072e60211f3" path="/var/lib/kubelet/pods/2a52bd28-14b8-4988-a291-6072e60211f3/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.121396 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bf5bbb4-9ebb-41b9-a888-4144660d088c" 
path="/var/lib/kubelet/pods/2bf5bbb4-9ebb-41b9-a888-4144660d088c/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.121954 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36597581-6c3f-42a7-98ba-155d3bb19320" path="/var/lib/kubelet/pods/36597581-6c3f-42a7-98ba-155d3bb19320/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.123010 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="424dddc1-7019-40ab-b405-a2dcaee08c65" path="/var/lib/kubelet/pods/424dddc1-7019-40ab-b405-a2dcaee08c65/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.123525 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46518ae6-7502-4276-8b86-58e85eff4951" path="/var/lib/kubelet/pods/46518ae6-7502-4276-8b86-58e85eff4951/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.124106 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" path="/var/lib/kubelet/pods/4ae7ffea-af5d-4804-84cf-fa3c5edfbd27/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.125127 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" path="/var/lib/kubelet/pods/4dd9e6d7-d0b1-49f3-920a-34e434835bfa/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.125699 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f7e56cf-c53d-4d3e-8e76-a8de6556546b" path="/var/lib/kubelet/pods/4f7e56cf-c53d-4d3e-8e76-a8de6556546b/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.126219 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="531a66a5-f4c9-44f1-83a7-a3e4292fef52" path="/var/lib/kubelet/pods/531a66a5-f4c9-44f1-83a7-a3e4292fef52/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.127232 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="596ae3ef-3b18-4646-8b3c-34b6db752b22" path="/var/lib/kubelet/pods/596ae3ef-3b18-4646-8b3c-34b6db752b22/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.127770 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6360ba89-8432-49d8-b5ea-97a52784ea66" path="/var/lib/kubelet/pods/6360ba89-8432-49d8-b5ea-97a52784ea66/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.128391 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6513cfab-0b30-4103-8e71-3492d2013657" path="/var/lib/kubelet/pods/6513cfab-0b30-4103-8e71-3492d2013657/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.129388 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a3ae90b-73bb-4fbf-887b-c6e432338502" path="/var/lib/kubelet/pods/7a3ae90b-73bb-4fbf-887b-c6e432338502/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.129994 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8057ad05-b8c9-4742-a0e2-388f0a901595" path="/var/lib/kubelet/pods/8057ad05-b8c9-4742-a0e2-388f0a901595/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.130598 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81a92903-9f60-4f44-917f-744a2b80a57c" path="/var/lib/kubelet/pods/81a92903-9f60-4f44-917f-744a2b80a57c/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.131155 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="874bb9d0-cbc9-4158-928d-8d6267fa02ab" 
path="/var/lib/kubelet/pods/874bb9d0-cbc9-4158-928d-8d6267fa02ab/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.132545 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f971e8a-d223-4b26-860a-b2ea8f3d545f" path="/var/lib/kubelet/pods/8f971e8a-d223-4b26-860a-b2ea8f3d545f/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.133158 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab5c0068-d61b-4d09-8632-70a5b637910c" path="/var/lib/kubelet/pods/ab5c0068-d61b-4d09-8632-70a5b637910c/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.135139 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad456e3b-04a1-48d6-8fbc-39e3faa00aa0" path="/var/lib/kubelet/pods/ad456e3b-04a1-48d6-8fbc-39e3faa00aa0/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.138115 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0e7f310-44e3-41ea-b143-cc1074c854a6" path="/var/lib/kubelet/pods/c0e7f310-44e3-41ea-b143-cc1074c854a6/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.138653 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7a1181b-900b-40dc-9855-795653215df3" path="/var/lib/kubelet/pods/f7a1181b-900b-40dc-9855-795653215df3/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.139419 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fce49e63-9930-42ca-83ff-fc116eeacf1d" path="/var/lib/kubelet/pods/fce49e63-9930-42ca-83ff-fc116eeacf1d/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.140700 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fed1a3ac-e920-47b6-b864-6f1ec34c0770" path="/var/lib/kubelet/pods/fed1a3ac-e920-47b6-b864-6f1ec34c0770/volumes" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.142409 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.142446 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-74459fb479-fkm77"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.142461 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-74459fb479-fkm77"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.163322 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.164640 4774 scope.go:117] "RemoveContainer" containerID="223dbde4450f7b7ff286da8e07b37592fd96e08300395e14958a6a48ec3f79fd" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.183868 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.218066 4774 scope.go:117] "RemoveContainer" containerID="a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06" Nov 21 14:28:26 crc kubenswrapper[4774]: E1121 14:28:26.218585 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06\": container with ID starting with a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06 not found: ID does not exist" containerID="a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.218659 4774 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06"} err="failed to get container status \"a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06\": rpc error: code = NotFound desc = could not find container \"a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06\": container with ID starting with a9e8cfc207e85c34866569c64a17d7663bd727afc2cfe872b88cfb7cf0d28c06 not found: ID does not exist" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.218705 4774 scope.go:117] "RemoveContainer" containerID="223dbde4450f7b7ff286da8e07b37592fd96e08300395e14958a6a48ec3f79fd" Nov 21 14:28:26 crc kubenswrapper[4774]: E1121 14:28:26.219064 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"223dbde4450f7b7ff286da8e07b37592fd96e08300395e14958a6a48ec3f79fd\": container with ID starting with 223dbde4450f7b7ff286da8e07b37592fd96e08300395e14958a6a48ec3f79fd not found: ID does not exist" containerID="223dbde4450f7b7ff286da8e07b37592fd96e08300395e14958a6a48ec3f79fd" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.219101 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"223dbde4450f7b7ff286da8e07b37592fd96e08300395e14958a6a48ec3f79fd"} err="failed to get container status \"223dbde4450f7b7ff286da8e07b37592fd96e08300395e14958a6a48ec3f79fd\": rpc error: code = NotFound desc = could not find container \"223dbde4450f7b7ff286da8e07b37592fd96e08300395e14958a6a48ec3f79fd\": container with ID starting with 223dbde4450f7b7ff286da8e07b37592fd96e08300395e14958a6a48ec3f79fd not found: ID does not exist" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.219126 4774 scope.go:117] "RemoveContainer" containerID="51d0feed6ebdd647d9dc85625eb572f976beebc0fdd506524e354f34444e80c4" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.274230 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.280429 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.283482 4774 scope.go:117] "RemoveContainer" containerID="78430813703b5574a2bac69e33958064713d4be7bef77c6035c099968ca709b0" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.307664 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/novaapi242e-account-delete-tbszv" Nov 21 14:28:26 crc kubenswrapper[4774]: E1121 14:28:26.369125 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7030e5d8_2d2b_4cc5_a283_339599595a18.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64e33a39_c371_477f_b1c9_d58189db4bc8.slice/crio-dc6d1662a0c2b78373b991c1b1e92ae633ed34194c4c88028696266cdba3ae00\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7030e5d8_2d2b_4cc5_a283_339599595a18.slice/crio-3ff256fc9339b1b56af433c2e3b1578584343f46f5866e2f99bc3016dfa20d81\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c16af5b_77af_4097_ad41_42aaa0aac4a1.slice/crio-fbbc56959cbb2a4349d71256067a2120bf33ba3f0f2153382d4014df261fd440\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc1f1975_32c8_494c_b6c7_69a72353879f.slice/crio-0c88d9b23222893490a25b9deae558b453daa1d5fa8eba5b37e1ba8fc2b53dd8\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf38fad89_cd6f_47d4_82f9_a761f6a9ed9e.slice/crio-b91420377d8a19f9ecfad89bbe133f3c6f735de99c7abdd0a922e0fece1382a7\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf38fad89_cd6f_47d4_82f9_a761f6a9ed9e.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3311b6bd_a19b_402c_afe4_22222098c669.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4c16af5b_77af_4097_ad41_42aaa0aac4a1.slice\": RecentStats: unable to find data in memory cache]" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.384484 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-b76744b8b-5ws6g"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.402596 4774 scope.go:117] "RemoveContainer" containerID="bd469b90ad8e693c30dda153b3857ce28b94eb20f3a3677f460f839008c9f746" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.489784 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts\") pod \"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad\" (UID: \"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad\") " Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.501269 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4m9k\" (UniqueName: \"kubernetes.io/projected/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-kube-api-access-r4m9k\") pod \"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad\" (UID: \"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad\") " Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.491332 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad" (UID: "58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.506203 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-kube-api-access-r4m9k" (OuterVolumeSpecName: "kube-api-access-r4m9k") pod "58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad" (UID: "58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad"). InnerVolumeSpecName "kube-api-access-r4m9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.507674 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-b76744b8b-5ws6g"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.517434 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstack-galera-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.521927 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican6523-account-delete-t2fnw" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.525997 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstack-galera-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.527767 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutronfb24-account-delete-z2nw8" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.533330 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-northd-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.544430 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-northd-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.560650 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.576407 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.581793 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novacell03102-account-delete-pwxhc"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.584841 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.586300 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novacell03102-account-delete-pwxhc"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.590939 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/memcached-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.601897 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/memcached-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.603056 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4m9k\" (UniqueName: \"kubernetes.io/projected/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-kube-api-access-r4m9k\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.603080 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.604565 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.610425 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.704898 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmhmm\" (UniqueName: \"kubernetes.io/projected/79476096-5d34-4e8a-9f33-3127bacf4e60-kube-api-access-rmhmm\") pod \"79476096-5d34-4e8a-9f33-3127bacf4e60\" (UID: \"79476096-5d34-4e8a-9f33-3127bacf4e60\") " Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.705123 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4fbc\" (UniqueName: \"kubernetes.io/projected/650c7a92-1469-4a9c-9a60-a846fe7ed823-kube-api-access-v4fbc\") pod \"650c7a92-1469-4a9c-9a60-a846fe7ed823\" (UID: \"650c7a92-1469-4a9c-9a60-a846fe7ed823\") " Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.705152 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-ovndb-tls-certs\") pod \"d7a5f9e1-9167-418e-8e1e-57e645d31785\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.705175 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-internal-tls-certs\") pod \"d7a5f9e1-9167-418e-8e1e-57e645d31785\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.705201 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-config\") pod \"d7a5f9e1-9167-418e-8e1e-57e645d31785\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.705245 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-public-tls-certs\") pod \"d7a5f9e1-9167-418e-8e1e-57e645d31785\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " Nov 21 
14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.705266 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-httpd-config\") pod \"d7a5f9e1-9167-418e-8e1e-57e645d31785\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.705292 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnnfn\" (UniqueName: \"kubernetes.io/projected/d7a5f9e1-9167-418e-8e1e-57e645d31785-kube-api-access-mnnfn\") pod \"d7a5f9e1-9167-418e-8e1e-57e645d31785\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.705311 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-combined-ca-bundle\") pod \"d7a5f9e1-9167-418e-8e1e-57e645d31785\" (UID: \"d7a5f9e1-9167-418e-8e1e-57e645d31785\") " Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.705416 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/650c7a92-1469-4a9c-9a60-a846fe7ed823-operator-scripts\") pod \"650c7a92-1469-4a9c-9a60-a846fe7ed823\" (UID: \"650c7a92-1469-4a9c-9a60-a846fe7ed823\") " Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.705467 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts\") pod \"79476096-5d34-4e8a-9f33-3127bacf4e60\" (UID: \"79476096-5d34-4e8a-9f33-3127bacf4e60\") " Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.706920 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/650c7a92-1469-4a9c-9a60-a846fe7ed823-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "650c7a92-1469-4a9c-9a60-a846fe7ed823" (UID: "650c7a92-1469-4a9c-9a60-a846fe7ed823"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.707010 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "79476096-5d34-4e8a-9f33-3127bacf4e60" (UID: "79476096-5d34-4e8a-9f33-3127bacf4e60"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.709830 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7a5f9e1-9167-418e-8e1e-57e645d31785-kube-api-access-mnnfn" (OuterVolumeSpecName: "kube-api-access-mnnfn") pod "d7a5f9e1-9167-418e-8e1e-57e645d31785" (UID: "d7a5f9e1-9167-418e-8e1e-57e645d31785"). InnerVolumeSpecName "kube-api-access-mnnfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.710075 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/650c7a92-1469-4a9c-9a60-a846fe7ed823-kube-api-access-v4fbc" (OuterVolumeSpecName: "kube-api-access-v4fbc") pod "650c7a92-1469-4a9c-9a60-a846fe7ed823" (UID: "650c7a92-1469-4a9c-9a60-a846fe7ed823"). InnerVolumeSpecName "kube-api-access-v4fbc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.717339 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79476096-5d34-4e8a-9f33-3127bacf4e60-kube-api-access-rmhmm" (OuterVolumeSpecName: "kube-api-access-rmhmm") pod "79476096-5d34-4e8a-9f33-3127bacf4e60" (UID: "79476096-5d34-4e8a-9f33-3127bacf4e60"). InnerVolumeSpecName "kube-api-access-rmhmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.717549 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "d7a5f9e1-9167-418e-8e1e-57e645d31785" (UID: "d7a5f9e1-9167-418e-8e1e-57e645d31785"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.750575 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7a5f9e1-9167-418e-8e1e-57e645d31785" (UID: "d7a5f9e1-9167-418e-8e1e-57e645d31785"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.758179 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-config" (OuterVolumeSpecName: "config") pod "d7a5f9e1-9167-418e-8e1e-57e645d31785" (UID: "d7a5f9e1-9167-418e-8e1e-57e645d31785"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.768957 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d7a5f9e1-9167-418e-8e1e-57e645d31785" (UID: "d7a5f9e1-9167-418e-8e1e-57e645d31785"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.770161 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "d7a5f9e1-9167-418e-8e1e-57e645d31785" (UID: "d7a5f9e1-9167-418e-8e1e-57e645d31785"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.771960 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d7a5f9e1-9167-418e-8e1e-57e645d31785" (UID: "d7a5f9e1-9167-418e-8e1e-57e645d31785"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.807424 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4fbc\" (UniqueName: \"kubernetes.io/projected/650c7a92-1469-4a9c-9a60-a846fe7ed823-kube-api-access-v4fbc\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.807471 4774 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.807485 4774 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.807493 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.807504 4774 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.807514 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-httpd-config\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.807524 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnnfn\" (UniqueName: \"kubernetes.io/projected/d7a5f9e1-9167-418e-8e1e-57e645d31785-kube-api-access-mnnfn\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.807532 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7a5f9e1-9167-418e-8e1e-57e645d31785-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.807542 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/650c7a92-1469-4a9c-9a60-a846fe7ed823-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.807552 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79476096-5d34-4e8a-9f33-3127bacf4e60-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.807560 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmhmm\" (UniqueName: \"kubernetes.io/projected/79476096-5d34-4e8a-9f33-3127bacf4e60-kube-api-access-rmhmm\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.914854 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutronfb24-account-delete-z2nw8" event={"ID":"650c7a92-1469-4a9c-9a60-a846fe7ed823","Type":"ContainerDied","Data":"4992742ddc6a9a496a1fb0493b0cc7978879fde993035329bf760388243535c7"} Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.914921 4774 scope.go:117] "RemoveContainer" containerID="d6a7d7a7c3bed4f014ad9ebd5dbb104ef9c38e3678d10bef857d3fe25fca8223" Nov 21 14:28:26 crc 
kubenswrapper[4774]: I1121 14:28:26.915041 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutronfb24-account-delete-z2nw8" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.934593 4774 generic.go:334] "Generic (PLEG): container finished" podID="d7a5f9e1-9167-418e-8e1e-57e645d31785" containerID="eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7" exitCode=0 Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.934780 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5546774f69-cpnh7" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.934790 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5546774f69-cpnh7" event={"ID":"d7a5f9e1-9167-418e-8e1e-57e645d31785","Type":"ContainerDied","Data":"eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7"} Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.934995 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5546774f69-cpnh7" event={"ID":"d7a5f9e1-9167-418e-8e1e-57e645d31785","Type":"ContainerDied","Data":"ad6ea00c94a7c73844bf4ddf0266295efee38366f85c5ecca5db0df858b2ff39"} Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.941691 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/novaapi242e-account-delete-tbszv" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.941874 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/novaapi242e-account-delete-tbszv" event={"ID":"58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad","Type":"ContainerDied","Data":"135851e48970ebaf5a4181ed44a5d612b81aa0f247e630e0d4aefafd44482dac"} Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.964781 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutronfb24-account-delete-z2nw8"] Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.966602 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican6523-account-delete-t2fnw" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.967632 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican6523-account-delete-t2fnw" event={"ID":"79476096-5d34-4e8a-9f33-3127bacf4e60","Type":"ContainerDied","Data":"ef6c0cf23a2d392647552b21088565764cbcc3e64b0777984f7a131de5f28567"} Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.968300 4774 scope.go:117] "RemoveContainer" containerID="1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28" Nov 21 14:28:26 crc kubenswrapper[4774]: I1121 14:28:26.971006 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutronfb24-account-delete-z2nw8"] Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.017984 4774 scope.go:117] "RemoveContainer" containerID="eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7" Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.023880 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican6523-account-delete-t2fnw"] Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.034778 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican6523-account-delete-t2fnw"] Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.043752 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5546774f69-cpnh7"] Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.059362 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5546774f69-cpnh7"] Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.060856 4774 scope.go:117] "RemoveContainer" containerID="1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28" Nov 21 14:28:27 crc kubenswrapper[4774]: E1121 14:28:27.061760 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28\": container with ID starting with 1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28 not found: ID does not exist" containerID="1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28" Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.061880 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28"} err="failed to get container status \"1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28\": rpc error: code = NotFound desc = could not find container \"1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28\": container with ID starting with 1ec35b6e76e4adfd261772706d2629f9e67dcba30547610b8f013bfc97658c28 not found: ID does not exist" Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.061974 4774 scope.go:117] "RemoveContainer" containerID="eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7" Nov 21 14:28:27 crc kubenswrapper[4774]: E1121 14:28:27.062400 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7\": container with ID starting with eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7 not found: ID does not exist" containerID="eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7" Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.062499 4774 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7"} err="failed to get container status \"eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7\": rpc error: code = NotFound desc = could not find container \"eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7\": container with ID starting with eb75b6a7228cf51ba088a0d931b9d8809f4fd5671d38c7a50ddf517a232a10c7 not found: ID does not exist" Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.062562 4774 scope.go:117] "RemoveContainer" containerID="49d17f16cc51398f705c8fb2c63a51d77570b36bf271a56c53df10dddd9ceca5" Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.066055 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/novaapi242e-account-delete-tbszv"] Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.078338 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/novaapi242e-account-delete-tbszv"] Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.091709 4774 scope.go:117] "RemoveContainer" containerID="3b0b0a6a69ecdbec432bc57521dfde480ebde0b424e3bfb51f0ad2b0bfd138f8" Nov 21 14:28:27 crc kubenswrapper[4774]: E1121 14:28:27.362792 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 21 14:28:27 crc kubenswrapper[4774]: E1121 14:28:27.363453 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 21 14:28:27 crc kubenswrapper[4774]: E1121 14:28:27.363933 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 21 14:28:27 crc kubenswrapper[4774]: E1121 14:28:27.363984 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server" Nov 21 14:28:27 crc kubenswrapper[4774]: E1121 14:28:27.364052 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 21 14:28:27 crc kubenswrapper[4774]: I1121 14:28:27.365689 4774 prober.go:107] 
"Probe failed" probeType="Readiness" pod="openstack/memcached-0" podUID="f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" containerName="memcached" probeResult="failure" output="dial tcp 10.217.0.105:11211: i/o timeout" Nov 21 14:28:27 crc kubenswrapper[4774]: E1121 14:28:27.366610 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 21 14:28:27 crc kubenswrapper[4774]: E1121 14:28:27.368108 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 21 14:28:27 crc kubenswrapper[4774]: E1121 14:28:27.368183 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovs-vswitchd" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.105513 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3311b6bd-a19b-402c-afe4-22222098c669" path="/var/lib/kubelet/pods/3311b6bd-a19b-402c-afe4-22222098c669/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.106888 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" path="/var/lib/kubelet/pods/4c16af5b-77af-4097-ad41-42aaa0aac4a1/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.107464 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57cdbc4f-20e9-4189-872d-f6f3c58f7093" path="/var/lib/kubelet/pods/57cdbc4f-20e9-4189-872d-f6f3c58f7093/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.108629 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad" path="/var/lib/kubelet/pods/58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.109166 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="612a4642-7af7-4d93-a27f-e63a0593a511" path="/var/lib/kubelet/pods/612a4642-7af7-4d93-a27f-e63a0593a511/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.109929 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64e33a39-c371-477f-b1c9-d58189db4bc8" path="/var/lib/kubelet/pods/64e33a39-c371-477f-b1c9-d58189db4bc8/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.110982 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="650c7a92-1469-4a9c-9a60-a846fe7ed823" path="/var/lib/kubelet/pods/650c7a92-1469-4a9c-9a60-a846fe7ed823/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.111646 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7030e5d8-2d2b-4cc5-a283-339599595a18" path="/var/lib/kubelet/pods/7030e5d8-2d2b-4cc5-a283-339599595a18/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.112947 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="75187d0f-77b0-45ee-a452-1850f0fe7851" path="/var/lib/kubelet/pods/75187d0f-77b0-45ee-a452-1850f0fe7851/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.113733 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79476096-5d34-4e8a-9f33-3127bacf4e60" path="/var/lib/kubelet/pods/79476096-5d34-4e8a-9f33-3127bacf4e60/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.114268 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bf981c0-8ff6-493c-a5fc-14610df3b362" path="/var/lib/kubelet/pods/7bf981c0-8ff6-493c-a5fc-14610df3b362/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.115496 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98c89c8e-6557-46b4-adf8-f954dfff68b3" path="/var/lib/kubelet/pods/98c89c8e-6557-46b4-adf8-f954dfff68b3/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.116130 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7a5f9e1-9167-418e-8e1e-57e645d31785" path="/var/lib/kubelet/pods/d7a5f9e1-9167-418e-8e1e-57e645d31785/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.116739 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" path="/var/lib/kubelet/pods/dc1f1975-32c8-494c-b6c7-69a72353879f/volumes" Nov 21 14:28:28 crc kubenswrapper[4774]: I1121 14:28:28.118114 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" path="/var/lib/kubelet/pods/f38fad89-cd6f-47d4-82f9-a761f6a9ed9e/volumes" Nov 21 14:28:29 crc kubenswrapper[4774]: I1121 14:28:29.427426 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="64e33a39-c371-477f-b1c9-d58189db4bc8" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: i/o timeout" Nov 21 14:28:30 crc kubenswrapper[4774]: E1121 14:28:30.583837 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 21 14:28:30 crc kubenswrapper[4774]: E1121 14:28:30.584311 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data podName:e2685b76-2150-4209-a55b-a989ae40b7db nodeName:}" failed. No retries permitted until 2025-11-21 14:28:46.584290689 +0000 UTC m=+1517.236489968 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data") pod "rabbitmq-server-0" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db") : configmap "rabbitmq-config-data" not found Nov 21 14:28:32 crc kubenswrapper[4774]: E1121 14:28:32.362535 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 21 14:28:32 crc kubenswrapper[4774]: E1121 14:28:32.365053 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 21 14:28:32 crc kubenswrapper[4774]: E1121 14:28:32.365565 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 21 14:28:32 crc kubenswrapper[4774]: E1121 14:28:32.366043 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 21 14:28:32 crc kubenswrapper[4774]: E1121 14:28:32.366254 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server" Nov 21 14:28:32 crc kubenswrapper[4774]: E1121 14:28:32.367745 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 21 14:28:32 crc kubenswrapper[4774]: E1121 14:28:32.370204 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 21 14:28:32 crc kubenswrapper[4774]: E1121 14:28:32.370451 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: 
, stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovs-vswitchd" Nov 21 14:28:37 crc kubenswrapper[4774]: E1121 14:28:37.362619 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 21 14:28:37 crc kubenswrapper[4774]: E1121 14:28:37.363798 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 21 14:28:37 crc kubenswrapper[4774]: E1121 14:28:37.364486 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 21 14:28:37 crc kubenswrapper[4774]: E1121 14:28:37.364569 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server" Nov 21 14:28:37 crc kubenswrapper[4774]: E1121 14:28:37.364909 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 21 14:28:37 crc kubenswrapper[4774]: E1121 14:28:37.367055 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 21 14:28:37 crc kubenswrapper[4774]: E1121 14:28:37.371785 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 21 14:28:37 crc kubenswrapper[4774]: E1121 14:28:37.371889 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-ld98r" 
podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovs-vswitchd" Nov 21 14:28:42 crc kubenswrapper[4774]: E1121 14:28:42.362636 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 21 14:28:42 crc kubenswrapper[4774]: E1121 14:28:42.364370 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 21 14:28:42 crc kubenswrapper[4774]: E1121 14:28:42.364907 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Nov 21 14:28:42 crc kubenswrapper[4774]: E1121 14:28:42.365208 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 is running failed: container process not found" probeType="Readiness" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server" Nov 21 14:28:42 crc kubenswrapper[4774]: E1121 14:28:42.365524 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 21 14:28:42 crc kubenswrapper[4774]: E1121 14:28:42.368294 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 21 14:28:42 crc kubenswrapper[4774]: E1121 14:28:42.370664 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Nov 21 14:28:42 crc kubenswrapper[4774]: E1121 14:28:42.370750 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/ovn-controller-ovs-ld98r" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovs-vswitchd" Nov 21 14:28:44 crc 
kubenswrapper[4774]: I1121 14:28:44.185772 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ld98r_124a9a6f-df08-4085-96d6-0a72f2bb2855/ovs-vswitchd/0.log" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.188056 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.202326 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-ld98r_124a9a6f-df08-4085-96d6-0a72f2bb2855/ovs-vswitchd/0.log" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.203387 4774 generic.go:334] "Generic (PLEG): container finished" podID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" exitCode=137 Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.203438 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ld98r" event={"ID":"124a9a6f-df08-4085-96d6-0a72f2bb2855","Type":"ContainerDied","Data":"b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd"} Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.203484 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-ld98r" event={"ID":"124a9a6f-df08-4085-96d6-0a72f2bb2855","Type":"ContainerDied","Data":"cdbcbbaf9d43029e5d48c9f20c7b0821e73c5c3c6edc8665165b2502719db633"} Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.203486 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-ld98r" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.203540 4774 scope.go:117] "RemoveContainer" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.242550 4774 scope.go:117] "RemoveContainer" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.266630 4774 scope.go:117] "RemoveContainer" containerID="bf7cb74b44be89be201e2ce67eb220507aa9f8df6a5d02065d5434bb1bd923f3" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.293296 4774 scope.go:117] "RemoveContainer" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" Nov 21 14:28:44 crc kubenswrapper[4774]: E1121 14:28:44.293924 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd\": container with ID starting with b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd not found: ID does not exist" containerID="b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.293980 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd"} err="failed to get container status \"b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd\": rpc error: code = NotFound desc = could not find container \"b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd\": container with ID starting with b9bbefb266d0b3564efb749e6572e0b22efa903a33546f6a57d0cc39c30ccdcd not found: ID does not exist" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.294022 4774 scope.go:117] "RemoveContainer" 
containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" Nov 21 14:28:44 crc kubenswrapper[4774]: E1121 14:28:44.294613 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3\": container with ID starting with 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 not found: ID does not exist" containerID="08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.294658 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3"} err="failed to get container status \"08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3\": rpc error: code = NotFound desc = could not find container \"08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3\": container with ID starting with 08f7fc307f4c47709045c198db23d607148c0a619ad0310b6a032df65e4cd4f3 not found: ID does not exist" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.294692 4774 scope.go:117] "RemoveContainer" containerID="bf7cb74b44be89be201e2ce67eb220507aa9f8df6a5d02065d5434bb1bd923f3" Nov 21 14:28:44 crc kubenswrapper[4774]: E1121 14:28:44.296517 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf7cb74b44be89be201e2ce67eb220507aa9f8df6a5d02065d5434bb1bd923f3\": container with ID starting with bf7cb74b44be89be201e2ce67eb220507aa9f8df6a5d02065d5434bb1bd923f3 not found: ID does not exist" containerID="bf7cb74b44be89be201e2ce67eb220507aa9f8df6a5d02065d5434bb1bd923f3" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.296570 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf7cb74b44be89be201e2ce67eb220507aa9f8df6a5d02065d5434bb1bd923f3"} err="failed to get container status \"bf7cb74b44be89be201e2ce67eb220507aa9f8df6a5d02065d5434bb1bd923f3\": rpc error: code = NotFound desc = could not find container \"bf7cb74b44be89be201e2ce67eb220507aa9f8df6a5d02065d5434bb1bd923f3\": container with ID starting with bf7cb74b44be89be201e2ce67eb220507aa9f8df6a5d02065d5434bb1bd923f3 not found: ID does not exist" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.330446 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/124a9a6f-df08-4085-96d6-0a72f2bb2855-scripts\") pod \"124a9a6f-df08-4085-96d6-0a72f2bb2855\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.330520 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-run\") pod \"124a9a6f-df08-4085-96d6-0a72f2bb2855\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.330568 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf6vj\" (UniqueName: \"kubernetes.io/projected/124a9a6f-df08-4085-96d6-0a72f2bb2855-kube-api-access-gf6vj\") pod \"124a9a6f-df08-4085-96d6-0a72f2bb2855\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.330612 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-lib\") pod \"124a9a6f-df08-4085-96d6-0a72f2bb2855\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.330648 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-log\") pod \"124a9a6f-df08-4085-96d6-0a72f2bb2855\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.330718 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-etc-ovs\") pod \"124a9a6f-df08-4085-96d6-0a72f2bb2855\" (UID: \"124a9a6f-df08-4085-96d6-0a72f2bb2855\") " Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.330734 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-run" (OuterVolumeSpecName: "var-run") pod "124a9a6f-df08-4085-96d6-0a72f2bb2855" (UID: "124a9a6f-df08-4085-96d6-0a72f2bb2855"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.330805 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "124a9a6f-df08-4085-96d6-0a72f2bb2855" (UID: "124a9a6f-df08-4085-96d6-0a72f2bb2855"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.330772 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-lib" (OuterVolumeSpecName: "var-lib") pod "124a9a6f-df08-4085-96d6-0a72f2bb2855" (UID: "124a9a6f-df08-4085-96d6-0a72f2bb2855"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.330886 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-log" (OuterVolumeSpecName: "var-log") pod "124a9a6f-df08-4085-96d6-0a72f2bb2855" (UID: "124a9a6f-df08-4085-96d6-0a72f2bb2855"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.331720 4774 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-run\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.331767 4774 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-lib\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.331789 4774 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-var-log\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.331808 4774 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/124a9a6f-df08-4085-96d6-0a72f2bb2855-etc-ovs\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.332383 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/124a9a6f-df08-4085-96d6-0a72f2bb2855-scripts" (OuterVolumeSpecName: "scripts") pod "124a9a6f-df08-4085-96d6-0a72f2bb2855" (UID: "124a9a6f-df08-4085-96d6-0a72f2bb2855"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.339809 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/124a9a6f-df08-4085-96d6-0a72f2bb2855-kube-api-access-gf6vj" (OuterVolumeSpecName: "kube-api-access-gf6vj") pod "124a9a6f-df08-4085-96d6-0a72f2bb2855" (UID: "124a9a6f-df08-4085-96d6-0a72f2bb2855"). InnerVolumeSpecName "kube-api-access-gf6vj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.432860 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/124a9a6f-df08-4085-96d6-0a72f2bb2855-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.432907 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf6vj\" (UniqueName: \"kubernetes.io/projected/124a9a6f-df08-4085-96d6-0a72f2bb2855-kube-api-access-gf6vj\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.543692 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ovs-ld98r"] Nov 21 14:28:44 crc kubenswrapper[4774]: I1121 14:28:44.553103 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ovs-ld98r"] Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.223556 4774 generic.go:334] "Generic (PLEG): container finished" podID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerID="01b0a87790fffb8562d8320c5dbbbc5a07eb54a2e1277dfed78d3269edb2bee5" exitCode=137 Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.223629 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"01b0a87790fffb8562d8320c5dbbbc5a07eb54a2e1277dfed78d3269edb2bee5"} Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.473069 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.652755 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.653014 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bm22n\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-kube-api-access-bm22n\") pod \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.653082 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-lock\") pod \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.653204 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift\") pod \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.653307 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-cache\") pod \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\" (UID: \"6cde8d60-bdf9-405f-8991-5c1f55b0ee76\") " Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.653882 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-lock" (OuterVolumeSpecName: "lock") pod "6cde8d60-bdf9-405f-8991-5c1f55b0ee76" (UID: "6cde8d60-bdf9-405f-8991-5c1f55b0ee76"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.654496 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-cache" (OuterVolumeSpecName: "cache") pod "6cde8d60-bdf9-405f-8991-5c1f55b0ee76" (UID: "6cde8d60-bdf9-405f-8991-5c1f55b0ee76"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.660523 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "swift") pod "6cde8d60-bdf9-405f-8991-5c1f55b0ee76" (UID: "6cde8d60-bdf9-405f-8991-5c1f55b0ee76"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.660544 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-kube-api-access-bm22n" (OuterVolumeSpecName: "kube-api-access-bm22n") pod "6cde8d60-bdf9-405f-8991-5c1f55b0ee76" (UID: "6cde8d60-bdf9-405f-8991-5c1f55b0ee76"). InnerVolumeSpecName "kube-api-access-bm22n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.665023 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "6cde8d60-bdf9-405f-8991-5c1f55b0ee76" (UID: "6cde8d60-bdf9-405f-8991-5c1f55b0ee76"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.755293 4774 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-etc-swift\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.755342 4774 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-cache\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.755412 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.755433 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bm22n\" (UniqueName: \"kubernetes.io/projected/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-kube-api-access-bm22n\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.755455 4774 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/6cde8d60-bdf9-405f-8991-5c1f55b0ee76-lock\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.785586 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Nov 21 14:28:45 crc kubenswrapper[4774]: I1121 14:28:45.859293 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.104320 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" path="/var/lib/kubelet/pods/124a9a6f-df08-4085-96d6-0a72f2bb2855/volumes" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.241876 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"6cde8d60-bdf9-405f-8991-5c1f55b0ee76","Type":"ContainerDied","Data":"a49286afe01eea3dff82dd38e3ea2a2c9e9de8138caa4af5df5fc4cbab4325a5"} Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.242031 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.242915 4774 scope.go:117] "RemoveContainer" containerID="01b0a87790fffb8562d8320c5dbbbc5a07eb54a2e1277dfed78d3269edb2bee5" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.270790 4774 scope.go:117] "RemoveContainer" containerID="89801dafc1d6b54a7d5db86bdd9ef9aa021a679876daee4fe43e50ca59175ae5" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.273260 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-storage-0"] Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.279784 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-storage-0"] Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.292637 4774 scope.go:117] "RemoveContainer" containerID="2dce44da6f6202c7964d5937a707b66b8c0555f55b1d955191986f35ef80726a" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.322068 4774 scope.go:117] "RemoveContainer" containerID="834a28d7bd2427951828771181afd8a938666b3888becfb912c78842574fb9ae" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.344959 4774 scope.go:117] "RemoveContainer" containerID="cd578a4be6466cb961f25126446b01ae08dfe77292401d7b6dc5269637ee2e33" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.372345 4774 scope.go:117] "RemoveContainer" containerID="1c4505cc2138852bc1de85d4a3368df20df9e8fc72c0b4f0a772d89a565a9d5c" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.396726 4774 scope.go:117] "RemoveContainer" containerID="4a13a7da01eb78f1caaf1ffc112b4e611dc9d20280166283d224a8d79da6a2d5" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.422947 4774 scope.go:117] "RemoveContainer" containerID="53a824add7ac0cc57042d70c06b911c7e6a34e1c2010603ee4d6fbc3ed438924" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.460753 4774 scope.go:117] "RemoveContainer" containerID="51b9f59856cff7bf6c2e7c193206a014c3b1c1b6ff7e65f0ffe94ef9fbaf701e" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.489142 4774 scope.go:117] "RemoveContainer" containerID="b0ff7c749c18817ad064c15649c712f4e89466819f6dd77e940ca84ed95e90a8" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.520180 4774 scope.go:117] "RemoveContainer" containerID="b02da81747033bd11ee8ad86892e420553f3c4e14b394a17b83ad199bf283c8e" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.552968 4774 scope.go:117] "RemoveContainer" containerID="78cf57423bdba8a0adb0930011b88d9283fb739e1b67a73287f7ff3ca582a4a1" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.588194 4774 scope.go:117] "RemoveContainer" containerID="4a079344642c9fb3a26394a82468d59daabece732a5466662ee8aeaa883a5bb3" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.630306 4774 scope.go:117] "RemoveContainer" containerID="d3b647fd9ca3744848c9bba9996b244e70638e808df6e12566f545983a15f3cc" Nov 21 14:28:46 crc kubenswrapper[4774]: I1121 14:28:46.662885 4774 scope.go:117] "RemoveContainer" containerID="9bc27234572696e44f557a383a86c888ed805788bafa91dd14bb78cdefab3b32" Nov 21 14:28:46 crc kubenswrapper[4774]: E1121 14:28:46.677051 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 21 14:28:46 crc kubenswrapper[4774]: E1121 14:28:46.677160 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data podName:e2685b76-2150-4209-a55b-a989ae40b7db nodeName:}" failed. 
No retries permitted until 2025-11-21 14:29:18.677133562 +0000 UTC m=+1549.329332861 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data") pod "rabbitmq-server-0" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db") : configmap "rabbitmq-config-data" not found Nov 21 14:28:48 crc kubenswrapper[4774]: I1121 14:28:48.111082 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" path="/var/lib/kubelet/pods/6cde8d60-bdf9-405f-8991-5c1f55b0ee76/volumes" Nov 21 14:28:49 crc kubenswrapper[4774]: I1121 14:28:49.483505 4774 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod7ee04f12-987f-4f31-81b3-10cd067af310"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod7ee04f12-987f-4f31-81b3-10cd067af310] : Timed out while waiting for systemd to remove kubepods-besteffort-pod7ee04f12_987f_4f31_81b3_10cd067af310.slice" Nov 21 14:28:49 crc kubenswrapper[4774]: E1121 14:28:49.483847 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod7ee04f12-987f-4f31-81b3-10cd067af310] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod7ee04f12-987f-4f31-81b3-10cd067af310] : Timed out while waiting for systemd to remove kubepods-besteffort-pod7ee04f12_987f_4f31_81b3_10cd067af310.slice" pod="openstack/ovn-controller-2sxpw" podUID="7ee04f12-987f-4f31-81b3-10cd067af310" Nov 21 14:28:50 crc kubenswrapper[4774]: I1121 14:28:50.301502 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-2sxpw" Nov 21 14:28:50 crc kubenswrapper[4774]: I1121 14:28:50.345616 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-2sxpw"] Nov 21 14:28:50 crc kubenswrapper[4774]: I1121 14:28:50.351845 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-2sxpw"] Nov 21 14:28:52 crc kubenswrapper[4774]: I1121 14:28:52.111932 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ee04f12-987f-4f31-81b3-10cd067af310" path="/var/lib/kubelet/pods/7ee04f12-987f-4f31-81b3-10cd067af310/volumes" Nov 21 14:28:59 crc kubenswrapper[4774]: I1121 14:28:59.600520 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:28:59 crc kubenswrapper[4774]: I1121 14:28:59.601116 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:29:18 crc kubenswrapper[4774]: E1121 14:29:18.694976 4774 configmap.go:193] Couldn't get configMap openstack/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Nov 21 14:29:18 crc kubenswrapper[4774]: E1121 14:29:18.695694 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data podName:e2685b76-2150-4209-a55b-a989ae40b7db nodeName:}" failed. 
No retries permitted until 2025-11-21 14:30:22.695673288 +0000 UTC m=+1613.347872547 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data") pod "rabbitmq-server-0" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db") : configmap "rabbitmq-config-data" not found Nov 21 14:29:19 crc kubenswrapper[4774]: E1121 14:29:19.830918 4774 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Nov 21 14:29:19 crc kubenswrapper[4774]: command '/bin/bash -c if [ ! -z "$(cat /etc/pod-info/skipPreStopChecks)" ]; then exit 0; fi; rabbitmq-upgrade await_online_quorum_plus_one -t 604800 && rabbitmq-upgrade await_online_synchronized_mirror -t 604800 || true && rabbitmq-upgrade drain -t 604800' exited with 69: Error: unable to perform an operation on node 'rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack'. Please see diagnostics information and suggestions below. Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Most common reasons for this are: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: * Target node is unreachable (e.g. due to hostname resolution, TCP connection or firewall issues) Nov 21 14:29:19 crc kubenswrapper[4774]: * CLI tool fails to authenticate with the server (e.g. due to CLI tool's Erlang cookie not matching that of the server) Nov 21 14:29:19 crc kubenswrapper[4774]: * Target node is not running Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: In addition to the diagnostics info below: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: * See the CLI, clustering and networking guides on https://rabbitmq.com/documentation.html to learn more Nov 21 14:29:19 crc kubenswrapper[4774]: * Consult server logs on node rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack Nov 21 14:29:19 crc kubenswrapper[4774]: * If target node is configured to use long node names, don't forget to use --longnames with CLI tools Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: DIAGNOSTICS Nov 21 14:29:19 crc kubenswrapper[4774]: =========== Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: attempted to contact: ['rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack'] Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack: Nov 21 14:29:19 crc kubenswrapper[4774]: * unable to connect to epmd (port 4369) on rabbitmq-server-0.rabbitmq-nodes.openstack: nxdomain (non-existing domain) Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Current node details: Nov 21 14:29:19 crc kubenswrapper[4774]: * node name: 'rabbitmqcli-209-rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack' Nov 21 14:29:19 crc kubenswrapper[4774]: * effective user's home directory: /var/lib/rabbitmq Nov 21 14:29:19 crc kubenswrapper[4774]: * Erlang cookie hash: gH4BAvUjTtIm9AAYOnmm8g== Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Error: unable to perform an operation on node 'rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack'. Please see diagnostics information and suggestions below. 
Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Most common reasons for this are: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: * Target node is unreachable (e.g. due to hostname resolution, TCP connection or firewall issues) Nov 21 14:29:19 crc kubenswrapper[4774]: * CLI tool fails to authenticate with the server (e.g. due to CLI tool's Erlang cookie not matching that of the server) Nov 21 14:29:19 crc kubenswrapper[4774]: * Target node is not running Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: In addition to the diagnostics info below: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: * See the CLI, clustering and networking guides on https://rabbitmq.com/documentation.html to learn more Nov 21 14:29:19 crc kubenswrapper[4774]: * Consult server logs on node rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack Nov 21 14:29:19 crc kubenswrapper[4774]: * If target node is configured to use long node names, don't forget to use --longnames with CLI tools Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: DIAGNOSTICS Nov 21 14:29:19 crc kubenswrapper[4774]: =========== Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: attempted to contact: ['rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack'] Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack: Nov 21 14:29:19 crc kubenswrapper[4774]: * unable to connect to epmd (port 4369) on rabbitmq-server-0.rabbitmq-nodes.openstack: nxdomain (non-existing domain) Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Current node details: Nov 21 14:29:19 crc kubenswrapper[4774]: * node name: 'rabbitmqcli-115-rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack' Nov 21 14:29:19 crc kubenswrapper[4774]: * effective user's home directory: /var/lib/rabbitmq Nov 21 14:29:19 crc kubenswrapper[4774]: * Erlang cookie hash: gH4BAvUjTtIm9AAYOnmm8g== Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: > execCommand=["/bin/bash","-c","if [ ! -z \"$(cat /etc/pod-info/skipPreStopChecks)\" ]; then exit 0; fi; rabbitmq-upgrade await_online_quorum_plus_one -t 604800 \u0026\u0026 rabbitmq-upgrade await_online_synchronized_mirror -t 604800 || true \u0026\u0026 rabbitmq-upgrade drain -t 604800"] containerName="rabbitmq" pod="openstack/rabbitmq-server-0" message=< Nov 21 14:29:19 crc kubenswrapper[4774]: Will put node rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack into maintenance mode. The node will no longer serve any client traffic! Nov 21 14:29:19 crc kubenswrapper[4774]: Error: unable to perform an operation on node 'rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack'. Please see diagnostics information and suggestions below. Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Most common reasons for this are: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: * Target node is unreachable (e.g. due to hostname resolution, TCP connection or firewall issues) Nov 21 14:29:19 crc kubenswrapper[4774]: * CLI tool fails to authenticate with the server (e.g. 
due to CLI tool's Erlang cookie not matching that of the server) Nov 21 14:29:19 crc kubenswrapper[4774]: * Target node is not running Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: In addition to the diagnostics info below: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: * See the CLI, clustering and networking guides on https://rabbitmq.com/documentation.html to learn more Nov 21 14:29:19 crc kubenswrapper[4774]: * Consult server logs on node rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack Nov 21 14:29:19 crc kubenswrapper[4774]: * If target node is configured to use long node names, don't forget to use --longnames with CLI tools Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: DIAGNOSTICS Nov 21 14:29:19 crc kubenswrapper[4774]: =========== Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: attempted to contact: ['rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack'] Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack: Nov 21 14:29:19 crc kubenswrapper[4774]: * unable to connect to epmd (port 4369) on rabbitmq-server-0.rabbitmq-nodes.openstack: nxdomain (non-existing domain) Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Current node details: Nov 21 14:29:19 crc kubenswrapper[4774]: * node name: 'rabbitmqcli-209-rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack' Nov 21 14:29:19 crc kubenswrapper[4774]: * effective user's home directory: /var/lib/rabbitmq Nov 21 14:29:19 crc kubenswrapper[4774]: * Erlang cookie hash: gH4BAvUjTtIm9AAYOnmm8g== Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Error: unable to perform an operation on node 'rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack'. Please see diagnostics information and suggestions below. Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Most common reasons for this are: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: * Target node is unreachable (e.g. due to hostname resolution, TCP connection or firewall issues) Nov 21 14:29:19 crc kubenswrapper[4774]: * CLI tool fails to authenticate with the server (e.g. 
due to CLI tool's Erlang cookie not matching that of the server) Nov 21 14:29:19 crc kubenswrapper[4774]: * Target node is not running Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: In addition to the diagnostics info below: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: * See the CLI, clustering and networking guides on https://rabbitmq.com/documentation.html to learn more Nov 21 14:29:19 crc kubenswrapper[4774]: * Consult server logs on node rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack Nov 21 14:29:19 crc kubenswrapper[4774]: * If target node is configured to use long node names, don't forget to use --longnames with CLI tools Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: DIAGNOSTICS Nov 21 14:29:19 crc kubenswrapper[4774]: =========== Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: attempted to contact: ['rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack'] Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack: Nov 21 14:29:19 crc kubenswrapper[4774]: * unable to connect to epmd (port 4369) on rabbitmq-server-0.rabbitmq-nodes.openstack: nxdomain (non-existing domain) Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Current node details: Nov 21 14:29:19 crc kubenswrapper[4774]: * node name: 'rabbitmqcli-115-rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack' Nov 21 14:29:19 crc kubenswrapper[4774]: * effective user's home directory: /var/lib/rabbitmq Nov 21 14:29:19 crc kubenswrapper[4774]: * Erlang cookie hash: gH4BAvUjTtIm9AAYOnmm8g== Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: > Nov 21 14:29:19 crc kubenswrapper[4774]: E1121 14:29:19.830982 4774 kuberuntime_container.go:691] "PreStop hook failed" err=< Nov 21 14:29:19 crc kubenswrapper[4774]: command '/bin/bash -c if [ ! -z "$(cat /etc/pod-info/skipPreStopChecks)" ]; then exit 0; fi; rabbitmq-upgrade await_online_quorum_plus_one -t 604800 && rabbitmq-upgrade await_online_synchronized_mirror -t 604800 || true && rabbitmq-upgrade drain -t 604800' exited with 69: Error: unable to perform an operation on node 'rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack'. Please see diagnostics information and suggestions below. Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Most common reasons for this are: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: * Target node is unreachable (e.g. due to hostname resolution, TCP connection or firewall issues) Nov 21 14:29:19 crc kubenswrapper[4774]: * CLI tool fails to authenticate with the server (e.g. 
due to CLI tool's Erlang cookie not matching that of the server) Nov 21 14:29:19 crc kubenswrapper[4774]: * Target node is not running Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: In addition to the diagnostics info below: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: * See the CLI, clustering and networking guides on https://rabbitmq.com/documentation.html to learn more Nov 21 14:29:19 crc kubenswrapper[4774]: * Consult server logs on node rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack Nov 21 14:29:19 crc kubenswrapper[4774]: * If target node is configured to use long node names, don't forget to use --longnames with CLI tools Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: DIAGNOSTICS Nov 21 14:29:19 crc kubenswrapper[4774]: =========== Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: attempted to contact: ['rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack'] Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack: Nov 21 14:29:19 crc kubenswrapper[4774]: * unable to connect to epmd (port 4369) on rabbitmq-server-0.rabbitmq-nodes.openstack: nxdomain (non-existing domain) Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Current node details: Nov 21 14:29:19 crc kubenswrapper[4774]: * node name: 'rabbitmqcli-209-rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack' Nov 21 14:29:19 crc kubenswrapper[4774]: * effective user's home directory: /var/lib/rabbitmq Nov 21 14:29:19 crc kubenswrapper[4774]: * Erlang cookie hash: gH4BAvUjTtIm9AAYOnmm8g== Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Error: unable to perform an operation on node 'rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack'. Please see diagnostics information and suggestions below. Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Most common reasons for this are: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: * Target node is unreachable (e.g. due to hostname resolution, TCP connection or firewall issues) Nov 21 14:29:19 crc kubenswrapper[4774]: * CLI tool fails to authenticate with the server (e.g. 
due to CLI tool's Erlang cookie not matching that of the server) Nov 21 14:29:19 crc kubenswrapper[4774]: * Target node is not running Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: In addition to the diagnostics info below: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: * See the CLI, clustering and networking guides on https://rabbitmq.com/documentation.html to learn more Nov 21 14:29:19 crc kubenswrapper[4774]: * Consult server logs on node rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack Nov 21 14:29:19 crc kubenswrapper[4774]: * If target node is configured to use long node names, don't forget to use --longnames with CLI tools Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: DIAGNOSTICS Nov 21 14:29:19 crc kubenswrapper[4774]: =========== Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: attempted to contact: ['rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack'] Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack: Nov 21 14:29:19 crc kubenswrapper[4774]: * unable to connect to epmd (port 4369) on rabbitmq-server-0.rabbitmq-nodes.openstack: nxdomain (non-existing domain) Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: Current node details: Nov 21 14:29:19 crc kubenswrapper[4774]: * node name: 'rabbitmqcli-115-rabbit@rabbitmq-server-0.rabbitmq-nodes.openstack' Nov 21 14:29:19 crc kubenswrapper[4774]: * effective user's home directory: /var/lib/rabbitmq Nov 21 14:29:19 crc kubenswrapper[4774]: * Erlang cookie hash: gH4BAvUjTtIm9AAYOnmm8g== Nov 21 14:29:19 crc kubenswrapper[4774]: Nov 21 14:29:19 crc kubenswrapper[4774]: > pod="openstack/rabbitmq-server-0" podUID="e2685b76-2150-4209-a55b-a989ae40b7db" containerName="rabbitmq" containerID="cri-o://6bc151b541f61d18fa6a5bd4d47d620e359bf3af5784a910604388f99bb6a180" Nov 21 14:29:19 crc kubenswrapper[4774]: I1121 14:29:19.831022 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="e2685b76-2150-4209-a55b-a989ae40b7db" containerName="rabbitmq" containerID="cri-o://6bc151b541f61d18fa6a5bd4d47d620e359bf3af5784a910604388f99bb6a180" gracePeriod=604738 Nov 21 14:29:23 crc kubenswrapper[4774]: I1121 14:29:23.773367 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="e2685b76-2150-4209-a55b-a989ae40b7db" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.101:5671: connect: connection refused" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.082714 4774 generic.go:334] "Generic (PLEG): container finished" podID="e2685b76-2150-4209-a55b-a989ae40b7db" containerID="6bc151b541f61d18fa6a5bd4d47d620e359bf3af5784a910604388f99bb6a180" exitCode=0 Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.082881 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2685b76-2150-4209-a55b-a989ae40b7db","Type":"ContainerDied","Data":"6bc151b541f61d18fa6a5bd4d47d620e359bf3af5784a910604388f99bb6a180"} Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.502713 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.648303 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-erlang-cookie\") pod \"e2685b76-2150-4209-a55b-a989ae40b7db\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.648425 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-tls\") pod \"e2685b76-2150-4209-a55b-a989ae40b7db\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.648519 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z667l\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-kube-api-access-z667l\") pod \"e2685b76-2150-4209-a55b-a989ae40b7db\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.648661 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data\") pod \"e2685b76-2150-4209-a55b-a989ae40b7db\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.648714 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-plugins\") pod \"e2685b76-2150-4209-a55b-a989ae40b7db\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.648764 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-plugins-conf\") pod \"e2685b76-2150-4209-a55b-a989ae40b7db\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.648860 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-server-conf\") pod \"e2685b76-2150-4209-a55b-a989ae40b7db\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.648937 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2685b76-2150-4209-a55b-a989ae40b7db-erlang-cookie-secret\") pod \"e2685b76-2150-4209-a55b-a989ae40b7db\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.649083 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"e2685b76-2150-4209-a55b-a989ae40b7db\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.649298 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-confd\") pod \"e2685b76-2150-4209-a55b-a989ae40b7db\" (UID: 
\"e2685b76-2150-4209-a55b-a989ae40b7db\") " Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.649361 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2685b76-2150-4209-a55b-a989ae40b7db-pod-info\") pod \"e2685b76-2150-4209-a55b-a989ae40b7db\" (UID: \"e2685b76-2150-4209-a55b-a989ae40b7db\") " Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.649662 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "e2685b76-2150-4209-a55b-a989ae40b7db" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.650194 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "e2685b76-2150-4209-a55b-a989ae40b7db" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.650812 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "e2685b76-2150-4209-a55b-a989ae40b7db" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.650956 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.650978 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.661108 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2685b76-2150-4209-a55b-a989ae40b7db-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "e2685b76-2150-4209-a55b-a989ae40b7db" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.661162 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "e2685b76-2150-4209-a55b-a989ae40b7db" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.661202 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "e2685b76-2150-4209-a55b-a989ae40b7db" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.661224 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-kube-api-access-z667l" (OuterVolumeSpecName: "kube-api-access-z667l") pod "e2685b76-2150-4209-a55b-a989ae40b7db" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db"). InnerVolumeSpecName "kube-api-access-z667l". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.663653 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/e2685b76-2150-4209-a55b-a989ae40b7db-pod-info" (OuterVolumeSpecName: "pod-info") pod "e2685b76-2150-4209-a55b-a989ae40b7db" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.689305 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data" (OuterVolumeSpecName: "config-data") pod "e2685b76-2150-4209-a55b-a989ae40b7db" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.731222 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-server-conf" (OuterVolumeSpecName: "server-conf") pod "e2685b76-2150-4209-a55b-a989ae40b7db" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.751933 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.751985 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z667l\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-kube-api-access-z667l\") on node \"crc\" DevicePath \"\"" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.752011 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.752029 4774 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-plugins-conf\") on node \"crc\" DevicePath \"\"" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.752049 4774 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e2685b76-2150-4209-a55b-a989ae40b7db-server-conf\") on node \"crc\" DevicePath \"\"" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.752066 4774 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e2685b76-2150-4209-a55b-a989ae40b7db-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.752123 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice 
started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.752142 4774 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e2685b76-2150-4209-a55b-a989ae40b7db-pod-info\") on node \"crc\" DevicePath \"\"" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.777156 4774 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.785323 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "e2685b76-2150-4209-a55b-a989ae40b7db" (UID: "e2685b76-2150-4209-a55b-a989ae40b7db"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.853189 4774 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Nov 21 14:29:26 crc kubenswrapper[4774]: I1121 14:29:26.853226 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e2685b76-2150-4209-a55b-a989ae40b7db-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Nov 21 14:29:27 crc kubenswrapper[4774]: I1121 14:29:27.106753 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"e2685b76-2150-4209-a55b-a989ae40b7db","Type":"ContainerDied","Data":"3f1e87645ff873cb4e896241e1e200680f38a3108fb04275c3485c50a8840fc2"} Nov 21 14:29:27 crc kubenswrapper[4774]: I1121 14:29:27.106940 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 21 14:29:27 crc kubenswrapper[4774]: I1121 14:29:27.107011 4774 scope.go:117] "RemoveContainer" containerID="6bc151b541f61d18fa6a5bd4d47d620e359bf3af5784a910604388f99bb6a180" Nov 21 14:29:27 crc kubenswrapper[4774]: I1121 14:29:27.167345 4774 scope.go:117] "RemoveContainer" containerID="d28eebf85b23a893614d02f00de474df7cb0032d8a129eb8f057b60aeb7a3b5d" Nov 21 14:29:27 crc kubenswrapper[4774]: I1121 14:29:27.170919 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 21 14:29:27 crc kubenswrapper[4774]: I1121 14:29:27.182958 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 21 14:29:28 crc kubenswrapper[4774]: I1121 14:29:28.113690 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2685b76-2150-4209-a55b-a989ae40b7db" path="/var/lib/kubelet/pods/e2685b76-2150-4209-a55b-a989ae40b7db/volumes" Nov 21 14:29:29 crc kubenswrapper[4774]: I1121 14:29:29.602325 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:29:29 crc kubenswrapper[4774]: I1121 14:29:29.602610 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.069414 4774 scope.go:117] "RemoveContainer" containerID="3cf0fc6137a3bf6ac4aa4be6ccbf88d67fbae484275815a29c62fc1117d49e8e" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.128130 4774 scope.go:117] "RemoveContainer" containerID="aaa15f882e1fd7018199c22c68333ff550fad91d1c6a777a6876fe84c7fc858b" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.158994 4774 scope.go:117] "RemoveContainer" containerID="ccd254ac8081ba427eba51acda723540d2c898bacd88918b0b167f5f8dc4e05c" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.179094 4774 scope.go:117] "RemoveContainer" containerID="61b465c77a81919460bfd27ff0b88f6f73822bcd4ca5c29028e54ff4fd70406e" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.213097 4774 scope.go:117] "RemoveContainer" containerID="4b636b9b604abd7e65fdf5cd245e45bed745018b346fa76088458d3add55daed" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.244937 4774 scope.go:117] "RemoveContainer" containerID="382d8b3ab93aaed3bce8e1c9baa71547be3be46908b0cf7410e4489e030eb0e9" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.268738 4774 scope.go:117] "RemoveContainer" containerID="3779ec4405b00452ac39136a499c21798d1756a48805e3682c76d529cb3c3f65" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.292410 4774 scope.go:117] "RemoveContainer" containerID="a63e72ed87ad6e5829063d53d36ef358e0c60f0ac83c22a939e23b0679064a2e" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.320394 4774 scope.go:117] "RemoveContainer" containerID="658d4d24fc6e94b57c7bb466d4271afd9667deeb008df3a43b13453eda4f811e" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.345046 4774 scope.go:117] "RemoveContainer" containerID="c8583eef8a391a28ea2dc5e764d94e0aa5490a82e94adc85f543fff3c67bdb93" Nov 21 14:29:35 crc kubenswrapper[4774]: 
I1121 14:29:35.363920 4774 scope.go:117] "RemoveContainer" containerID="d1073de69bf390fe30269d7b088a8b3fbfa034bdc8ef77499fb5ba4f9878eef7" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.384443 4774 scope.go:117] "RemoveContainer" containerID="69c2ce68633246110d46b63e32f22397b08e5e0b28d2e21b8332046a0b226d6e" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.404073 4774 scope.go:117] "RemoveContainer" containerID="7816a7aeb0079af33aea84708be11ff4efaf0a43340ae98f3d1b5f43d97183df" Nov 21 14:29:35 crc kubenswrapper[4774]: I1121 14:29:35.428417 4774 scope.go:117] "RemoveContainer" containerID="5a1430b8284950fa8feb2b26ca8f0daccce10ea31e4279be89c3cfb24be1dc61" Nov 21 14:29:59 crc kubenswrapper[4774]: I1121 14:29:59.600906 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:29:59 crc kubenswrapper[4774]: I1121 14:29:59.601585 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:29:59 crc kubenswrapper[4774]: I1121 14:29:59.601667 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:29:59 crc kubenswrapper[4774]: I1121 14:29:59.602740 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 14:29:59 crc kubenswrapper[4774]: I1121 14:29:59.603075 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" gracePeriod=600 Nov 21 14:29:59 crc kubenswrapper[4774]: E1121 14:29:59.733268 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.148175 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln"] Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.148800 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db7f3cb4-269e-443e-836e-caae1c2d122f" containerName="ovsdbserver-nb" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.148834 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="db7f3cb4-269e-443e-836e-caae1c2d122f" containerName="ovsdbserver-nb" Nov 21 14:30:00 crc 
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.148855 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="29fd4802-19c7-4e11-b776-c505c03206b0" containerName="glance-httpd"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.148870 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79476096-5d34-4e8a-9f33-3127bacf4e60" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.148876 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="79476096-5d34-4e8a-9f33-3127bacf4e60" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.148884 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2685b76-2150-4209-a55b-a989ae40b7db" containerName="rabbitmq"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.148890 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2685b76-2150-4209-a55b-a989ae40b7db" containerName="rabbitmq"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.148900 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f89a7785-0a49-4c28-a587-ec113d2f3635" containerName="dnsmasq-dns"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.148905 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f89a7785-0a49-4c28-a587-ec113d2f3635" containerName="dnsmasq-dns"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.148913 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e33a39-c371-477f-b1c9-d58189db4bc8" containerName="setup-container"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.148918 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e33a39-c371-477f-b1c9-d58189db4bc8" containerName="setup-container"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.148926 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-server"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.148932 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-server"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.148942 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.148947 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.148958 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" containerName="memcached"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.148963 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" containerName="memcached"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.148974 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="proxy-httpd"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.148979 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="proxy-httpd"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.148986 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-replicator"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.148992 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-replicator"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149001 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerName="ovn-northd"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149006 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerName="ovn-northd"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149014 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0f5811f-60f6-4820-b981-715448365e52" containerName="nova-cell1-novncproxy-novncproxy"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149019 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0f5811f-60f6-4820-b981-715448365e52" containerName="nova-cell1-novncproxy-novncproxy"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149032 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerName="openstack-network-exporter"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149038 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerName="openstack-network-exporter"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149045 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="424dddc1-7019-40ab-b405-a2dcaee08c65" containerName="kube-state-metrics"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149051 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="424dddc1-7019-40ab-b405-a2dcaee08c65" containerName="kube-state-metrics"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149062 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149068 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149084 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e33a39-c371-477f-b1c9-d58189db4bc8" containerName="rabbitmq"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149095 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e33a39-c371-477f-b1c9-d58189db4bc8" containerName="rabbitmq"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149110 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="sg-core"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149117 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="sg-core"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149139 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovs-vswitchd"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149146 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovs-vswitchd"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149155 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-expirer"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149162 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-expirer"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149175 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="650c7a92-1469-4a9c-9a60-a846fe7ed823" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149181 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="650c7a92-1469-4a9c-9a60-a846fe7ed823" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149193 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="650c7a92-1469-4a9c-9a60-a846fe7ed823" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149201 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="650c7a92-1469-4a9c-9a60-a846fe7ed823" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149215 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8057ad05-b8c9-4742-a0e2-388f0a901595" containerName="barbican-keystone-listener-log"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149223 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8057ad05-b8c9-4742-a0e2-388f0a901595" containerName="barbican-keystone-listener-log"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149233 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ee04f12-987f-4f31-81b3-10cd067af310" containerName="ovn-controller"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149239 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ee04f12-987f-4f31-81b3-10cd067af310" containerName="ovn-controller"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149249 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="ceilometer-central-agent"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149255 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="ceilometer-central-agent"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149263 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7030e5d8-2d2b-4cc5-a283-339599595a18" containerName="galera"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149269 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7030e5d8-2d2b-4cc5-a283-339599595a18" containerName="galera"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149275 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98c89c8e-6557-46b4-adf8-f954dfff68b3" containerName="barbican-worker-log"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149283 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="98c89c8e-6557-46b4-adf8-f954dfff68b3" containerName="barbican-worker-log"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149293 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3311b6bd-a19b-402c-afe4-22222098c669" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149300 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="3311b6bd-a19b-402c-afe4-22222098c669" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149314 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="531a66a5-f4c9-44f1-83a7-a3e4292fef52" containerName="nova-cell0-conductor-conductor"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149322 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="531a66a5-f4c9-44f1-83a7-a3e4292fef52" containerName="nova-cell0-conductor-conductor"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149331 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-auditor"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149338 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-auditor"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149345 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-replicator"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149351 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-replicator"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149357 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7a5f9e1-9167-418e-8e1e-57e645d31785" containerName="neutron-httpd"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149362 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7a5f9e1-9167-418e-8e1e-57e645d31785" containerName="neutron-httpd"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149369 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7a5f9e1-9167-418e-8e1e-57e645d31785" containerName="neutron-api"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149376 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7a5f9e1-9167-418e-8e1e-57e645d31785" containerName="neutron-api"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149388 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerName="barbican-api-log"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149396 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerName="barbican-api-log"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149407 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81a92903-9f60-4f44-917f-744a2b80a57c" containerName="nova-cell1-conductor-conductor"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149416 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="81a92903-9f60-4f44-917f-744a2b80a57c" containerName="nova-cell1-conductor-conductor"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149424 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-updater"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149431 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-updater"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149445 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-auditor"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149451 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-auditor"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149460 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-server"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149468 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-server"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149482 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98c89c8e-6557-46b4-adf8-f954dfff68b3" containerName="barbican-worker"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149489 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="98c89c8e-6557-46b4-adf8-f954dfff68b3" containerName="barbican-worker"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149505 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-auditor"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149513 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-auditor"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149523 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="swift-recon-cron"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149530 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="swift-recon-cron"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149541 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="rsync"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149547 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="rsync"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149556 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149563 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149572 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-log"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149582 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-log"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149593 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75187d0f-77b0-45ee-a452-1850f0fe7851" containerName="probe"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149599 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="75187d0f-77b0-45ee-a452-1850f0fe7851" containerName="probe"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149611 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf5bbb4-9ebb-41b9-a888-4144660d088c" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149618 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf5bbb4-9ebb-41b9-a888-4144660d088c" containerName="mariadb-account-delete"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149627 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0563658a-f1e8-4cae-b165-9697c4673895" containerName="ovsdbserver-sb"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149634 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0563658a-f1e8-4cae-b165-9697c4673895" containerName="ovsdbserver-sb"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149646 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0563658a-f1e8-4cae-b165-9697c4673895" containerName="openstack-network-exporter"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149653 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0563658a-f1e8-4cae-b165-9697c4673895" containerName="openstack-network-exporter"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149686 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75187d0f-77b0-45ee-a452-1850f0fe7851" containerName="cinder-scheduler"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149695 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="75187d0f-77b0-45ee-a452-1850f0fe7851" containerName="cinder-scheduler"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149703 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29fd4802-19c7-4e11-b776-c505c03206b0" containerName="glance-log"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149710 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="29fd4802-19c7-4e11-b776-c505c03206b0" containerName="glance-log"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149720 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f89a7785-0a49-4c28-a587-ec113d2f3635" containerName="init"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149727 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f89a7785-0a49-4c28-a587-ec113d2f3635" containerName="init"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149737 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="ceilometer-notification-agent"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149745 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="ceilometer-notification-agent"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149754 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server-init"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149762 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server-init"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149773 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="204761da-3cd3-4024-8268-2c4ade77be70" containerName="placement-api"
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149781 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="204761da-3cd3-4024-8268-2c4ade77be70" containerName="placement-api"
Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149799 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerName="proxy-httpd"
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerName="proxy-httpd" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149809 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerName="proxy-httpd" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149843 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7030e5d8-2d2b-4cc5-a283-339599595a18" containerName="mysql-bootstrap" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149851 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7030e5d8-2d2b-4cc5-a283-339599595a18" containerName="mysql-bootstrap" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149864 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bf981c0-8ff6-493c-a5fc-14610df3b362" containerName="cinder-api-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149871 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bf981c0-8ff6-493c-a5fc-14610df3b362" containerName="cinder-api-log" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149882 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-reaper" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149889 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-reaper" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149898 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36597581-6c3f-42a7-98ba-155d3bb19320" containerName="glance-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149905 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="36597581-6c3f-42a7-98ba-155d3bb19320" containerName="glance-log" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149913 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-updater" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149920 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-updater" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149929 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerName="proxy-server" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149936 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerName="proxy-server" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149946 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57cdbc4f-20e9-4189-872d-f6f3c58f7093" containerName="keystone-api" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149953 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="57cdbc4f-20e9-4189-872d-f6f3c58f7093" containerName="keystone-api" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149964 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a1181b-900b-40dc-9855-795653215df3" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149971 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a1181b-900b-40dc-9855-795653215df3" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.149983 
4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79476096-5d34-4e8a-9f33-3127bacf4e60" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.149990 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="79476096-5d34-4e8a-9f33-3127bacf4e60" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150003 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="612a4642-7af7-4d93-a27f-e63a0593a511" containerName="nova-api-api" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150010 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="612a4642-7af7-4d93-a27f-e63a0593a511" containerName="nova-api-api" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150023 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="612a4642-7af7-4d93-a27f-e63a0593a511" containerName="nova-api-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150030 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="612a4642-7af7-4d93-a27f-e63a0593a511" containerName="nova-api-log" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150040 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-server" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150047 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-server" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150055 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4549a9b5-fb19-4dae-9fee-b03d5d49e95d" containerName="openstack-network-exporter" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150062 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4549a9b5-fb19-4dae-9fee-b03d5d49e95d" containerName="openstack-network-exporter" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150071 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad456e3b-04a1-48d6-8fbc-39e3faa00aa0" containerName="nova-scheduler-scheduler" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150080 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad456e3b-04a1-48d6-8fbc-39e3faa00aa0" containerName="nova-scheduler-scheduler" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150094 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerName="barbican-api" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150102 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerName="barbican-api" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150110 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36597581-6c3f-42a7-98ba-155d3bb19320" containerName="glance-httpd" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150117 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="36597581-6c3f-42a7-98ba-155d3bb19320" containerName="glance-httpd" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150127 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-replicator" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150134 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-replicator" Nov 21 
14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150144 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a3ae90b-73bb-4fbf-887b-c6e432338502" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150151 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a3ae90b-73bb-4fbf-887b-c6e432338502" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150160 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db7f3cb4-269e-443e-836e-caae1c2d122f" containerName="openstack-network-exporter" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150168 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="db7f3cb4-269e-443e-836e-caae1c2d122f" containerName="openstack-network-exporter" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150176 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b17b723-7e23-4a12-916e-0f2d00b72239" containerName="galera" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150184 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b17b723-7e23-4a12-916e-0f2d00b72239" containerName="galera" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150198 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="204761da-3cd3-4024-8268-2c4ade77be70" containerName="placement-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150205 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="204761da-3cd3-4024-8268-2c4ade77be70" containerName="placement-log" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150214 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bf981c0-8ff6-493c-a5fc-14610df3b362" containerName="cinder-api" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150222 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bf981c0-8ff6-493c-a5fc-14610df3b362" containerName="cinder-api" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150234 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-metadata" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150241 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-metadata" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150252 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8057ad05-b8c9-4742-a0e2-388f0a901595" containerName="barbican-keystone-listener" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150262 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8057ad05-b8c9-4742-a0e2-388f0a901595" containerName="barbican-keystone-listener" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150274 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2685b76-2150-4209-a55b-a989ae40b7db" containerName="setup-container" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150282 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2685b76-2150-4209-a55b-a989ae40b7db" containerName="setup-container" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.150290 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b17b723-7e23-4a12-916e-0f2d00b72239" containerName="mysql-bootstrap" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150297 4774 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="8b17b723-7e23-4a12-916e-0f2d00b72239" containerName="mysql-bootstrap" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150458 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="29fd4802-19c7-4e11-b776-c505c03206b0" containerName="glance-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150475 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="98c89c8e-6557-46b4-adf8-f954dfff68b3" containerName="barbican-worker" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150490 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a3ae90b-73bb-4fbf-887b-c6e432338502" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150499 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-expirer" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150510 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="204761da-3cd3-4024-8268-2c4ade77be70" containerName="placement-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150518 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="612a4642-7af7-4d93-a27f-e63a0593a511" containerName="nova-api-api" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150526 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf5bbb4-9ebb-41b9-a888-4144660d088c" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150533 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="0563658a-f1e8-4cae-b165-9697c4673895" containerName="openstack-network-exporter" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150547 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-metadata" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150560 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerName="barbican-api-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150569 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="swift-recon-cron" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150579 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="650c7a92-1469-4a9c-9a60-a846fe7ed823" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150588 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="57cdbc4f-20e9-4189-872d-f6f3c58f7093" containerName="keystone-api" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150600 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="79476096-5d34-4e8a-9f33-3127bacf4e60" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150608 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="db7f3cb4-269e-443e-836e-caae1c2d122f" containerName="ovsdbserver-nb" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150620 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovsdb-server" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150634 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="36597581-6c3f-42a7-98ba-155d3bb19320" 
containerName="glance-httpd" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150644 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-replicator" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150652 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="98c89c8e-6557-46b4-adf8-f954dfff68b3" containerName="barbican-worker-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150660 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-server" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150671 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="ceilometer-notification-agent" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150683 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-replicator" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150694 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="75187d0f-77b0-45ee-a452-1850f0fe7851" containerName="cinder-scheduler" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150701 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bf981c0-8ff6-493c-a5fc-14610df3b362" containerName="cinder-api-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150711 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8057ad05-b8c9-4742-a0e2-388f0a901595" containerName="barbican-keystone-listener-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150720 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerName="ovn-northd" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150732 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8057ad05-b8c9-4742-a0e2-388f0a901595" containerName="barbican-keystone-listener" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150745 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7a5f9e1-9167-418e-8e1e-57e645d31785" containerName="neutron-api" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150754 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="204761da-3cd3-4024-8268-2c4ade77be70" containerName="placement-api" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150764 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerName="proxy-server" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150777 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="75187d0f-77b0-45ee-a452-1850f0fe7851" containerName="probe" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150789 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="29fd4802-19c7-4e11-b776-c505c03206b0" containerName="glance-httpd" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150802 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ee04f12-987f-4f31-81b3-10cd067af310" containerName="ovn-controller" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150812 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: 
I1121 14:30:00.150838 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b17b723-7e23-4a12-916e-0f2d00b72239" containerName="galera" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150847 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-server" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150858 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-updater" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150866 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="36597581-6c3f-42a7-98ba-155d3bb19320" containerName="glance-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150874 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-auditor" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150886 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c16af5b-77af-4097-ad41-42aaa0aac4a1" containerName="openstack-network-exporter" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150898 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-replicator" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150907 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="rsync" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150914 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a1181b-900b-40dc-9855-795653215df3" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150925 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="424dddc1-7019-40ab-b405-a2dcaee08c65" containerName="kube-state-metrics" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150934 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4549a9b5-fb19-4dae-9fee-b03d5d49e95d" containerName="openstack-network-exporter" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150947 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2685b76-2150-4209-a55b-a989ae40b7db" containerName="rabbitmq" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150957 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ae7ffea-af5d-4804-84cf-fa3c5edfbd27" containerName="nova-metadata-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150967 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-server" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150974 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="64e33a39-c371-477f-b1c9-d58189db4bc8" containerName="rabbitmq" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150988 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="ceilometer-central-agent" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.150999 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-reaper" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151009 4774 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="sg-core" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151017 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7a5f9e1-9167-418e-8e1e-57e645d31785" containerName="neutron-httpd" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151026 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bf981c0-8ff6-493c-a5fc-14610df3b362" containerName="cinder-api" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151036 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0f5811f-60f6-4820-b981-715448365e52" containerName="nova-cell1-novncproxy-novncproxy" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151050 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="0563658a-f1e8-4cae-b165-9697c4673895" containerName="ovsdbserver-sb" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151060 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="1718aee5-94ce-4682-aa62-28843ff1e2ef" containerName="barbican-api" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151072 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f89a7785-0a49-4c28-a587-ec113d2f3635" containerName="dnsmasq-dns" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151085 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc1f1975-32c8-494c-b6c7-69a72353879f" containerName="proxy-httpd" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151096 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad456e3b-04a1-48d6-8fbc-39e3faa00aa0" containerName="nova-scheduler-scheduler" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151106 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="79476096-5d34-4e8a-9f33-3127bacf4e60" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151118 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dd9e6d7-d0b1-49f3-920a-34e434835bfa" containerName="proxy-httpd" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151125 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="531a66a5-f4c9-44f1-83a7-a3e4292fef52" containerName="nova-cell0-conductor-conductor" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151135 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="81a92903-9f60-4f44-917f-744a2b80a57c" containerName="nova-cell1-conductor-conductor" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151145 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="db7f3cb4-269e-443e-836e-caae1c2d122f" containerName="openstack-network-exporter" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151172 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7030e5d8-2d2b-4cc5-a283-339599595a18" containerName="galera" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151180 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="object-updater" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151188 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="124a9a6f-df08-4085-96d6-0a72f2bb2855" containerName="ovs-vswitchd" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151196 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f38fad89-cd6f-47d4-82f9-a761f6a9ed9e" containerName="memcached" 
Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151206 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="612a4642-7af7-4d93-a27f-e63a0593a511" containerName="nova-api-log" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151215 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="account-auditor" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151227 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cde8d60-bdf9-405f-8991-5c1f55b0ee76" containerName="container-auditor" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151235 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="3311b6bd-a19b-402c-afe4-22222098c669" containerName="mariadb-account-delete" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.151910 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.156792 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.157041 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.162066 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln"] Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.259397 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-secret-volume\") pod \"collect-profiles-29395590-tknln\" (UID: \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.259711 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzgm9\" (UniqueName: \"kubernetes.io/projected/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-kube-api-access-qzgm9\") pod \"collect-profiles-29395590-tknln\" (UID: \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.259839 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-config-volume\") pod \"collect-profiles-29395590-tknln\" (UID: \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.360605 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-secret-volume\") pod \"collect-profiles-29395590-tknln\" (UID: \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.360707 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzgm9\" 
(UniqueName: \"kubernetes.io/projected/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-kube-api-access-qzgm9\") pod \"collect-profiles-29395590-tknln\" (UID: \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.360891 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-config-volume\") pod \"collect-profiles-29395590-tknln\" (UID: \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.361715 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-config-volume\") pod \"collect-profiles-29395590-tknln\" (UID: \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.373633 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-secret-volume\") pod \"collect-profiles-29395590-tknln\" (UID: \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.378178 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzgm9\" (UniqueName: \"kubernetes.io/projected/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-kube-api-access-qzgm9\") pod \"collect-profiles-29395590-tknln\" (UID: \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.507372 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" exitCode=0 Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.507444 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d"} Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.507508 4774 scope.go:117] "RemoveContainer" containerID="50f0abd54c499ac14c722ce78a1be249e3c65fdd8bde5f56a8b5c580514c52ff" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.508206 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:30:00 crc kubenswrapper[4774]: E1121 14:30:00.508607 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.522269 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:00 crc kubenswrapper[4774]: I1121 14:30:00.994585 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln"] Nov 21 14:30:01 crc kubenswrapper[4774]: W1121 14:30:01.003954 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7012a04a_12f8_44bd_9ab4_d67fa60f12b3.slice/crio-a8de5a7984f80e62900e13fbfeac446cd2b8934623e209a2a89e5240cfec6121 WatchSource:0}: Error finding container a8de5a7984f80e62900e13fbfeac446cd2b8934623e209a2a89e5240cfec6121: Status 404 returned error can't find the container with id a8de5a7984f80e62900e13fbfeac446cd2b8934623e209a2a89e5240cfec6121 Nov 21 14:30:01 crc kubenswrapper[4774]: I1121 14:30:01.523708 4774 generic.go:334] "Generic (PLEG): container finished" podID="7012a04a-12f8-44bd-9ab4-d67fa60f12b3" containerID="1f679e4e0fc10db92ee9f76e55737746c2b7a1ec82ace36ee8395ac3871079c2" exitCode=0 Nov 21 14:30:01 crc kubenswrapper[4774]: I1121 14:30:01.523838 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" event={"ID":"7012a04a-12f8-44bd-9ab4-d67fa60f12b3","Type":"ContainerDied","Data":"1f679e4e0fc10db92ee9f76e55737746c2b7a1ec82ace36ee8395ac3871079c2"} Nov 21 14:30:01 crc kubenswrapper[4774]: I1121 14:30:01.523997 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" event={"ID":"7012a04a-12f8-44bd-9ab4-d67fa60f12b3","Type":"ContainerStarted","Data":"a8de5a7984f80e62900e13fbfeac446cd2b8934623e209a2a89e5240cfec6121"} Nov 21 14:30:02 crc kubenswrapper[4774]: I1121 14:30:02.951472 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:03 crc kubenswrapper[4774]: I1121 14:30:03.105741 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-config-volume\") pod \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\" (UID: \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\") " Nov 21 14:30:03 crc kubenswrapper[4774]: I1121 14:30:03.107069 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-secret-volume\") pod \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\" (UID: \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\") " Nov 21 14:30:03 crc kubenswrapper[4774]: I1121 14:30:03.107110 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-config-volume" (OuterVolumeSpecName: "config-volume") pod "7012a04a-12f8-44bd-9ab4-d67fa60f12b3" (UID: "7012a04a-12f8-44bd-9ab4-d67fa60f12b3"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:30:03 crc kubenswrapper[4774]: I1121 14:30:03.108134 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzgm9\" (UniqueName: \"kubernetes.io/projected/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-kube-api-access-qzgm9\") pod \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\" (UID: \"7012a04a-12f8-44bd-9ab4-d67fa60f12b3\") " Nov 21 14:30:03 crc kubenswrapper[4774]: I1121 14:30:03.109110 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 14:30:03 crc kubenswrapper[4774]: I1121 14:30:03.114379 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7012a04a-12f8-44bd-9ab4-d67fa60f12b3" (UID: "7012a04a-12f8-44bd-9ab4-d67fa60f12b3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:30:03 crc kubenswrapper[4774]: I1121 14:30:03.115901 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-kube-api-access-qzgm9" (OuterVolumeSpecName: "kube-api-access-qzgm9") pod "7012a04a-12f8-44bd-9ab4-d67fa60f12b3" (UID: "7012a04a-12f8-44bd-9ab4-d67fa60f12b3"). InnerVolumeSpecName "kube-api-access-qzgm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:30:03 crc kubenswrapper[4774]: I1121 14:30:03.210494 4774 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 21 14:30:03 crc kubenswrapper[4774]: I1121 14:30:03.210592 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzgm9\" (UniqueName: \"kubernetes.io/projected/7012a04a-12f8-44bd-9ab4-d67fa60f12b3-kube-api-access-qzgm9\") on node \"crc\" DevicePath \"\"" Nov 21 14:30:03 crc kubenswrapper[4774]: I1121 14:30:03.547543 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" event={"ID":"7012a04a-12f8-44bd-9ab4-d67fa60f12b3","Type":"ContainerDied","Data":"a8de5a7984f80e62900e13fbfeac446cd2b8934623e209a2a89e5240cfec6121"} Nov 21 14:30:03 crc kubenswrapper[4774]: I1121 14:30:03.547593 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8de5a7984f80e62900e13fbfeac446cd2b8934623e209a2a89e5240cfec6121" Nov 21 14:30:03 crc kubenswrapper[4774]: I1121 14:30:03.547684 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln" Nov 21 14:30:15 crc kubenswrapper[4774]: I1121 14:30:15.093350 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:30:15 crc kubenswrapper[4774]: E1121 14:30:15.094414 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:30:26 crc kubenswrapper[4774]: I1121 14:30:26.094027 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:30:26 crc kubenswrapper[4774]: E1121 14:30:26.095464 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:30:35 crc kubenswrapper[4774]: I1121 14:30:35.676808 4774 scope.go:117] "RemoveContainer" containerID="5ebb6d26c1ba872afd7ee0103564de8a6a3cffae676294d1aa9ec716904f0544" Nov 21 14:30:35 crc kubenswrapper[4774]: I1121 14:30:35.720772 4774 scope.go:117] "RemoveContainer" containerID="029dc3912d84b8c7abbaa6bf0010eb02ce724b0c91e424cdb8dbcc692724db07" Nov 21 14:30:35 crc kubenswrapper[4774]: I1121 14:30:35.777792 4774 scope.go:117] "RemoveContainer" containerID="ae147c874c0b0365082cb788ba5bfc4259836a02272f29d3be8ca3a93c55d41e" Nov 21 14:30:35 crc kubenswrapper[4774]: I1121 14:30:35.812898 4774 scope.go:117] "RemoveContainer" containerID="1271e61aa11802d7fb4ab4117cfddc4172f791cb66fb11ceca51b9d2cdc0afe1" Nov 21 14:30:35 crc kubenswrapper[4774]: I1121 14:30:35.882931 4774 scope.go:117] "RemoveContainer" containerID="86fa94a80df28b54b4d5368a5a30fc434548f1c7d9fff7e5b31574d1e6c77717" Nov 21 14:30:35 crc kubenswrapper[4774]: I1121 14:30:35.921380 4774 scope.go:117] "RemoveContainer" containerID="01fd58ee9988246dbee8d387c653c96d53a5ed6e239bbaeea7f3c93f959a0ad6" Nov 21 14:30:35 crc kubenswrapper[4774]: I1121 14:30:35.959998 4774 scope.go:117] "RemoveContainer" containerID="f5da3006308e36eb20210e041418de5b32da8956f89007e299267104f93eea3b" Nov 21 14:30:35 crc kubenswrapper[4774]: I1121 14:30:35.992372 4774 scope.go:117] "RemoveContainer" containerID="df99df975d7724db717054000f116a20d79041d1f3c9746269b3fac4603daed4" Nov 21 14:30:36 crc kubenswrapper[4774]: I1121 14:30:36.034153 4774 scope.go:117] "RemoveContainer" containerID="e113bb91e61fb20bd55da6f381dd07a86f741c04641af203c9cd800b9d16d231" Nov 21 14:30:36 crc kubenswrapper[4774]: I1121 14:30:36.063131 4774 scope.go:117] "RemoveContainer" containerID="448856a50b1cbeaf51347f942b94dae57d6ecf3b79598bf3ce4b6f069353bd77" Nov 21 14:30:36 crc kubenswrapper[4774]: I1121 14:30:36.088182 4774 scope.go:117] "RemoveContainer" containerID="e39f09e4dfe399ff6aa725be530f4a13f8b5581beb78b8270bd3111542e5c9f3" Nov 21 14:30:36 crc kubenswrapper[4774]: I1121 14:30:36.156457 4774 scope.go:117] "RemoveContainer" 
containerID="21b7aeda05fe14e7e9c045015b25dd7fb41f27342b2b92959141887020d99f8f" Nov 21 14:30:36 crc kubenswrapper[4774]: I1121 14:30:36.213375 4774 scope.go:117] "RemoveContainer" containerID="bb5a89d0866ada50c918454a8f38ceeea054b4043bfb044f638cc16eb0514685" Nov 21 14:30:39 crc kubenswrapper[4774]: I1121 14:30:39.093605 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:30:39 crc kubenswrapper[4774]: E1121 14:30:39.095232 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:30:51 crc kubenswrapper[4774]: I1121 14:30:51.093338 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:30:51 crc kubenswrapper[4774]: E1121 14:30:51.094904 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:31:06 crc kubenswrapper[4774]: I1121 14:31:06.093466 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:31:06 crc kubenswrapper[4774]: E1121 14:31:06.094618 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:31:20 crc kubenswrapper[4774]: I1121 14:31:20.125455 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:31:20 crc kubenswrapper[4774]: E1121 14:31:20.126909 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:31:33 crc kubenswrapper[4774]: I1121 14:31:33.093724 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:31:33 crc kubenswrapper[4774]: E1121 14:31:33.094604 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:31:36 crc kubenswrapper[4774]: I1121 14:31:36.544920 4774 scope.go:117] "RemoveContainer" containerID="969fbf4f7d7b2be68e80f13bc613bae47954c1d9cf0870455b84d0a4bc6e18ef" Nov 21 14:31:36 crc kubenswrapper[4774]: I1121 14:31:36.585278 4774 scope.go:117] "RemoveContainer" containerID="aea70590e231f1b48851f4ffa1e6852272819cf991813022fe7bff259b0f4d04" Nov 21 14:31:36 crc kubenswrapper[4774]: I1121 14:31:36.626100 4774 scope.go:117] "RemoveContainer" containerID="61597ca4b4483f6ed3cd70ca3eb4e3b66b8c2f1e1c119be77516f9b9aec25720" Nov 21 14:31:36 crc kubenswrapper[4774]: I1121 14:31:36.672534 4774 scope.go:117] "RemoveContainer" containerID="b74c59ecf9ca863931ef02c75fc057e4b9cf2d307851a4820767d23b14d83360" Nov 21 14:31:36 crc kubenswrapper[4774]: I1121 14:31:36.737122 4774 scope.go:117] "RemoveContainer" containerID="e1bece3865eab576d0537b2757e1996d3eb563738ed1cbdb0bbc09abd23a1ae4" Nov 21 14:31:36 crc kubenswrapper[4774]: I1121 14:31:36.762467 4774 scope.go:117] "RemoveContainer" containerID="400661145f174c0b9169da8eed8077bb5592eced771c3239659552c524ba7eb9" Nov 21 14:31:36 crc kubenswrapper[4774]: I1121 14:31:36.787485 4774 scope.go:117] "RemoveContainer" containerID="cc1d60dd83d00832b380eb3c950ba9940eb8e75dc9cfe60f03f0990330129de2" Nov 21 14:31:36 crc kubenswrapper[4774]: I1121 14:31:36.817755 4774 scope.go:117] "RemoveContainer" containerID="1a0a15cfd145eaf76485365a7148d16577e73bc0add7da74f8b15ec9b79a5303" Nov 21 14:31:36 crc kubenswrapper[4774]: I1121 14:31:36.844979 4774 scope.go:117] "RemoveContainer" containerID="8dd78826a36d2a0c84007fefea4af9b39c62687e997e7371678a93eeba3ce1aa" Nov 21 14:31:36 crc kubenswrapper[4774]: I1121 14:31:36.871361 4774 scope.go:117] "RemoveContainer" containerID="95fb4afd0377bbc0f0df432194dc5a7490303bfd572fe17af159b5bcfaffa8dd" Nov 21 14:31:36 crc kubenswrapper[4774]: I1121 14:31:36.901595 4774 scope.go:117] "RemoveContainer" containerID="f284772158aa9afb2ac683ea5db800eb76ca8ee198f42ac67c00afb5d059483f" Nov 21 14:31:36 crc kubenswrapper[4774]: I1121 14:31:36.929384 4774 scope.go:117] "RemoveContainer" containerID="cfabf58a3660a117c2e4bc8be1a895c8cb8999d2d918f92bec39bbee7161485e" Nov 21 14:31:46 crc kubenswrapper[4774]: I1121 14:31:46.094281 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:31:46 crc kubenswrapper[4774]: E1121 14:31:46.095512 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:31:59 crc kubenswrapper[4774]: I1121 14:31:59.093567 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:31:59 crc kubenswrapper[4774]: E1121 14:31:59.094661 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:32:11 crc kubenswrapper[4774]: I1121 14:32:11.093392 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:32:11 crc kubenswrapper[4774]: E1121 14:32:11.094181 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:32:23 crc kubenswrapper[4774]: I1121 14:32:23.093762 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:32:23 crc kubenswrapper[4774]: E1121 14:32:23.095082 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:32:35 crc kubenswrapper[4774]: I1121 14:32:35.093344 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:32:35 crc kubenswrapper[4774]: E1121 14:32:35.094245 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.116991 4774 scope.go:117] "RemoveContainer" containerID="f1f0758e5401f409b81f484636dad8ae36a7270239b242629d5b05458d87db34" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.193940 4774 scope.go:117] "RemoveContainer" containerID="0b5271262c3c849a993c33ce298b38eb7c992e1d7d39bbd4ae89cc26488705d8" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.215639 4774 scope.go:117] "RemoveContainer" containerID="60d2bd1d86a69536d7cbb5a80d6478c043cbf386394ef9477d5c5f4ac5d413ce" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.261657 4774 scope.go:117] "RemoveContainer" containerID="e14fad553b5ebf16a48988366becd3b511703877d857f82b3dff805dbc486ccb" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.309294 4774 scope.go:117] "RemoveContainer" containerID="038ee870a823946f43bd1d652272038621a6567ad96155f489796343f86963d7" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.341723 4774 scope.go:117] "RemoveContainer" containerID="0b0a5c5f9e19b8e02291c0c54cddb5779d88fddf73c5f3de122740b8608b1067" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.366890 4774 scope.go:117] "RemoveContainer" containerID="d4666e8da93a9918577e1748e849a82b0f153998843e0e0d13b0cf184d7b9c3e" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.401713 4774 scope.go:117] "RemoveContainer" 
containerID="bdfda6c612f373c01bcb72e2d01db576c8f2e098d2b01dfac3f8bdf41267689e" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.423470 4774 scope.go:117] "RemoveContainer" containerID="99052893755b66df9cfd4b4ab5f26b2fc638a33462e504620e030ca232e1aded" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.455963 4774 scope.go:117] "RemoveContainer" containerID="a43a8569084f224a0dc3ea5dc22f601537d3a46a2926db122b669c10e28a2c1a" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.499095 4774 scope.go:117] "RemoveContainer" containerID="b8d03bb1b95b5563b9e17389f6912b365ce1253044fc2b37f70c9d63eb743194" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.531540 4774 scope.go:117] "RemoveContainer" containerID="8359f311c5a169817e14ceb2a2bd6f2047c46e90f7fcf834e5f7b4e695b44023" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.558876 4774 scope.go:117] "RemoveContainer" containerID="c3d2080c4d1517a927737cdfe470200b33ed1dfc064dd0c21a2afa217e1ea935" Nov 21 14:32:37 crc kubenswrapper[4774]: I1121 14:32:37.583257 4774 scope.go:117] "RemoveContainer" containerID="cf5874a27369c23eb02a38e945173e313d37d5eea273de5528c6e3f4c20042c1" Nov 21 14:32:48 crc kubenswrapper[4774]: I1121 14:32:48.097934 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:32:48 crc kubenswrapper[4774]: E1121 14:32:48.099136 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:33:03 crc kubenswrapper[4774]: I1121 14:33:03.094120 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:33:03 crc kubenswrapper[4774]: E1121 14:33:03.094833 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:33:17 crc kubenswrapper[4774]: I1121 14:33:17.093617 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:33:17 crc kubenswrapper[4774]: E1121 14:33:17.094792 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:33:32 crc kubenswrapper[4774]: I1121 14:33:32.093220 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:33:32 crc kubenswrapper[4774]: E1121 14:33:32.096009 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:33:37 crc kubenswrapper[4774]: I1121 14:33:37.832431 4774 scope.go:117] "RemoveContainer" containerID="f794e1beb2a4d2e0aa4f9c55c4bf3c19e6f4475d6330263d426714add8939453" Nov 21 14:33:37 crc kubenswrapper[4774]: I1121 14:33:37.857076 4774 scope.go:117] "RemoveContainer" containerID="a7c9ecd2a24973515fc128d880451a4e28dc9101f6d676278ef3ba81ea04ca19" Nov 21 14:33:37 crc kubenswrapper[4774]: I1121 14:33:37.897249 4774 scope.go:117] "RemoveContainer" containerID="1c051875890cc87d20e2ccc60014cd64e0c54c66081134927ca3c7218c65fef2" Nov 21 14:33:37 crc kubenswrapper[4774]: I1121 14:33:37.915172 4774 scope.go:117] "RemoveContainer" containerID="b6dead7e6b8a9edca06d5258569ba83c012500d4af7950b97020473ff90fca2b" Nov 21 14:33:37 crc kubenswrapper[4774]: I1121 14:33:37.931572 4774 scope.go:117] "RemoveContainer" containerID="36dd18acf6da72cf687d626808b4dda1668438a188a9f1018c121f0a0c64d299" Nov 21 14:33:37 crc kubenswrapper[4774]: I1121 14:33:37.949438 4774 scope.go:117] "RemoveContainer" containerID="581678da81a51c7400fc4c31a5574369fb2025f18fff099bc0af08132f13654f" Nov 21 14:33:37 crc kubenswrapper[4774]: I1121 14:33:37.966173 4774 scope.go:117] "RemoveContainer" containerID="ea9ca71244bc7f1b46eba74e6204643a6aa38bccdbe0a89dc25f4da34716f6b9" Nov 21 14:33:37 crc kubenswrapper[4774]: I1121 14:33:37.981092 4774 scope.go:117] "RemoveContainer" containerID="21b0a39b83253fe42307631f2014556d041f1359f887388fb2f8c11c1f9d769b" Nov 21 14:33:38 crc kubenswrapper[4774]: I1121 14:33:38.000929 4774 scope.go:117] "RemoveContainer" containerID="7d2c6e460846a332f45e2cd1fd8b4211e1fef71fdaba2c330e61b8c2240fa3a3" Nov 21 14:33:38 crc kubenswrapper[4774]: I1121 14:33:38.019127 4774 scope.go:117] "RemoveContainer" containerID="3ab69fee82d7e8cb78023f292bf75b19ed476e76aa600827e67cc6b39f135018" Nov 21 14:33:38 crc kubenswrapper[4774]: I1121 14:33:38.036448 4774 scope.go:117] "RemoveContainer" containerID="a4a27b3c5077e95426b1db0a18c43f2ded3d18629d74ecfdf80ae409e2215348" Nov 21 14:33:46 crc kubenswrapper[4774]: I1121 14:33:46.093123 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:33:46 crc kubenswrapper[4774]: E1121 14:33:46.094119 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:33:57 crc kubenswrapper[4774]: I1121 14:33:57.094881 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:33:57 crc kubenswrapper[4774]: E1121 14:33:57.096077 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" 
podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:34:10 crc kubenswrapper[4774]: I1121 14:34:10.097379 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:34:10 crc kubenswrapper[4774]: E1121 14:34:10.098503 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:34:23 crc kubenswrapper[4774]: I1121 14:34:23.093997 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:34:23 crc kubenswrapper[4774]: E1121 14:34:23.095138 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:34:36 crc kubenswrapper[4774]: I1121 14:34:36.093662 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:34:36 crc kubenswrapper[4774]: E1121 14:34:36.094825 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:34:38 crc kubenswrapper[4774]: I1121 14:34:38.151549 4774 scope.go:117] "RemoveContainer" containerID="38469690fa07fe07f776fd52a55363dcb7d1936d82c2fd1d99897be0174b2185" Nov 21 14:34:38 crc kubenswrapper[4774]: I1121 14:34:38.190074 4774 scope.go:117] "RemoveContainer" containerID="1e65322f1850f643444a1c4b348d8c5959eb5addc0d201ec1cb0390faccbede6" Nov 21 14:34:38 crc kubenswrapper[4774]: I1121 14:34:38.216396 4774 scope.go:117] "RemoveContainer" containerID="6703fd3bafe778bc3a2dd40f769dd8e0a9d17470badf7480e9e3ba534863afae" Nov 21 14:34:47 crc kubenswrapper[4774]: I1121 14:34:47.093191 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:34:47 crc kubenswrapper[4774]: E1121 14:34:47.094369 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:35:00 crc kubenswrapper[4774]: I1121 14:35:00.101127 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:35:01 crc kubenswrapper[4774]: I1121 14:35:01.030806 4774 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"90a99d25b96ff645ef746e1ff6ba31b977e6ef17320d3d45927e8b989a18d9b8"} Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.411765 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mxjzn"] Nov 21 14:35:13 crc kubenswrapper[4774]: E1121 14:35:13.413084 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7012a04a-12f8-44bd-9ab4-d67fa60f12b3" containerName="collect-profiles" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.413115 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7012a04a-12f8-44bd-9ab4-d67fa60f12b3" containerName="collect-profiles" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.413397 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="650c7a92-1469-4a9c-9a60-a846fe7ed823" containerName="mariadb-account-delete" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.413435 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="58b0c1b5-ec3b-4e7f-a2eb-78eacc9bc8ad" containerName="mariadb-account-delete" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.413460 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7012a04a-12f8-44bd-9ab4-d67fa60f12b3" containerName="collect-profiles" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.415411 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.422900 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mxjzn"] Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.591661 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g424r"] Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.593593 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.597330 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec5904a5-44b7-4554-88f3-cf256f28d9f2-utilities\") pod \"redhat-marketplace-mxjzn\" (UID: \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\") " pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.597477 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7svn\" (UniqueName: \"kubernetes.io/projected/ec5904a5-44b7-4554-88f3-cf256f28d9f2-kube-api-access-q7svn\") pod \"redhat-marketplace-mxjzn\" (UID: \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\") " pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.597526 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec5904a5-44b7-4554-88f3-cf256f28d9f2-catalog-content\") pod \"redhat-marketplace-mxjzn\" (UID: \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\") " pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.603865 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g424r"] Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.698955 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7svn\" (UniqueName: \"kubernetes.io/projected/ec5904a5-44b7-4554-88f3-cf256f28d9f2-kube-api-access-q7svn\") pod \"redhat-marketplace-mxjzn\" (UID: \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\") " pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.699018 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec5904a5-44b7-4554-88f3-cf256f28d9f2-catalog-content\") pod \"redhat-marketplace-mxjzn\" (UID: \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\") " pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.699113 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec5904a5-44b7-4554-88f3-cf256f28d9f2-utilities\") pod \"redhat-marketplace-mxjzn\" (UID: \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\") " pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.699147 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-catalog-content\") pod \"certified-operators-g424r\" (UID: \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\") " pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.699172 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkggd\" (UniqueName: \"kubernetes.io/projected/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-kube-api-access-mkggd\") pod \"certified-operators-g424r\" (UID: \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\") " pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:13 crc 
kubenswrapper[4774]: I1121 14:35:13.699190 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-utilities\") pod \"certified-operators-g424r\" (UID: \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\") " pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.700301 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec5904a5-44b7-4554-88f3-cf256f28d9f2-catalog-content\") pod \"redhat-marketplace-mxjzn\" (UID: \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\") " pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.700548 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec5904a5-44b7-4554-88f3-cf256f28d9f2-utilities\") pod \"redhat-marketplace-mxjzn\" (UID: \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\") " pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.723462 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7svn\" (UniqueName: \"kubernetes.io/projected/ec5904a5-44b7-4554-88f3-cf256f28d9f2-kube-api-access-q7svn\") pod \"redhat-marketplace-mxjzn\" (UID: \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\") " pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.735848 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.800931 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-catalog-content\") pod \"certified-operators-g424r\" (UID: \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\") " pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.800979 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkggd\" (UniqueName: \"kubernetes.io/projected/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-kube-api-access-mkggd\") pod \"certified-operators-g424r\" (UID: \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\") " pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.801002 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-utilities\") pod \"certified-operators-g424r\" (UID: \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\") " pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.801640 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-utilities\") pod \"certified-operators-g424r\" (UID: \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\") " pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.802116 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-catalog-content\") pod \"certified-operators-g424r\" (UID: \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\") " pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.823587 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkggd\" (UniqueName: \"kubernetes.io/projected/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-kube-api-access-mkggd\") pod \"certified-operators-g424r\" (UID: \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\") " pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:13 crc kubenswrapper[4774]: I1121 14:35:13.949280 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:14 crc kubenswrapper[4774]: I1121 14:35:14.290935 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mxjzn"] Nov 21 14:35:14 crc kubenswrapper[4774]: I1121 14:35:14.455950 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g424r"] Nov 21 14:35:14 crc kubenswrapper[4774]: W1121 14:35:14.463554 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1a0ad2bc_dd3f_41c4_b0a6_b28d382f48e2.slice/crio-8a0f5b17445037d3891bd3075f1e17de068f30501ceb201297c8c4c3c28fb75b WatchSource:0}: Error finding container 8a0f5b17445037d3891bd3075f1e17de068f30501ceb201297c8c4c3c28fb75b: Status 404 returned error can't find the container with id 8a0f5b17445037d3891bd3075f1e17de068f30501ceb201297c8c4c3c28fb75b Nov 21 14:35:15 crc kubenswrapper[4774]: I1121 14:35:15.168948 4774 generic.go:334] "Generic (PLEG): container finished" podID="1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" containerID="245aa1222f27ff25a09dd97527370778f1d9df811abb8355007068c3e6322176" exitCode=0 Nov 21 14:35:15 crc kubenswrapper[4774]: I1121 14:35:15.169053 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g424r" event={"ID":"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2","Type":"ContainerDied","Data":"245aa1222f27ff25a09dd97527370778f1d9df811abb8355007068c3e6322176"} Nov 21 14:35:15 crc kubenswrapper[4774]: I1121 14:35:15.169091 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g424r" event={"ID":"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2","Type":"ContainerStarted","Data":"8a0f5b17445037d3891bd3075f1e17de068f30501ceb201297c8c4c3c28fb75b"} Nov 21 14:35:15 crc kubenswrapper[4774]: I1121 14:35:15.171502 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 14:35:15 crc kubenswrapper[4774]: I1121 14:35:15.174060 4774 generic.go:334] "Generic (PLEG): container finished" podID="ec5904a5-44b7-4554-88f3-cf256f28d9f2" containerID="4504454e2a192396005f41eb231bfa981fbd89bc1cc2c4fe7aeb8919950880f4" exitCode=0 Nov 21 14:35:15 crc kubenswrapper[4774]: I1121 14:35:15.174143 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mxjzn" event={"ID":"ec5904a5-44b7-4554-88f3-cf256f28d9f2","Type":"ContainerDied","Data":"4504454e2a192396005f41eb231bfa981fbd89bc1cc2c4fe7aeb8919950880f4"} Nov 21 14:35:15 crc kubenswrapper[4774]: I1121 14:35:15.174209 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mxjzn" 
event={"ID":"ec5904a5-44b7-4554-88f3-cf256f28d9f2","Type":"ContainerStarted","Data":"bd8ebb17194e1273c05b59ca118220b44596571fa1eca97d2ff57254294bd578"} Nov 21 14:35:16 crc kubenswrapper[4774]: I1121 14:35:16.010438 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jx27p"] Nov 21 14:35:16 crc kubenswrapper[4774]: I1121 14:35:16.013458 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:16 crc kubenswrapper[4774]: I1121 14:35:16.020143 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jx27p"] Nov 21 14:35:16 crc kubenswrapper[4774]: I1121 14:35:16.144123 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prmg9\" (UniqueName: \"kubernetes.io/projected/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-kube-api-access-prmg9\") pod \"community-operators-jx27p\" (UID: \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\") " pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:16 crc kubenswrapper[4774]: I1121 14:35:16.144778 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-catalog-content\") pod \"community-operators-jx27p\" (UID: \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\") " pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:16 crc kubenswrapper[4774]: I1121 14:35:16.144977 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-utilities\") pod \"community-operators-jx27p\" (UID: \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\") " pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:16 crc kubenswrapper[4774]: I1121 14:35:16.246680 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-catalog-content\") pod \"community-operators-jx27p\" (UID: \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\") " pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:16 crc kubenswrapper[4774]: I1121 14:35:16.246775 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-utilities\") pod \"community-operators-jx27p\" (UID: \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\") " pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:16 crc kubenswrapper[4774]: I1121 14:35:16.246901 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prmg9\" (UniqueName: \"kubernetes.io/projected/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-kube-api-access-prmg9\") pod \"community-operators-jx27p\" (UID: \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\") " pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:16 crc kubenswrapper[4774]: I1121 14:35:16.247322 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-catalog-content\") pod \"community-operators-jx27p\" (UID: \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\") " pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:16 crc 
kubenswrapper[4774]: I1121 14:35:16.247627 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-utilities\") pod \"community-operators-jx27p\" (UID: \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\") " pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:16 crc kubenswrapper[4774]: I1121 14:35:16.275325 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prmg9\" (UniqueName: \"kubernetes.io/projected/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-kube-api-access-prmg9\") pod \"community-operators-jx27p\" (UID: \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\") " pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:16 crc kubenswrapper[4774]: I1121 14:35:16.366418 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:17 crc kubenswrapper[4774]: I1121 14:35:17.504249 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jx27p"] Nov 21 14:35:18 crc kubenswrapper[4774]: I1121 14:35:18.204273 4774 generic.go:334] "Generic (PLEG): container finished" podID="ec5904a5-44b7-4554-88f3-cf256f28d9f2" containerID="4ba017b5d991c763c61b1fc5e2a61630dbd456856f6fd0777811416b9e4d3305" exitCode=0 Nov 21 14:35:18 crc kubenswrapper[4774]: I1121 14:35:18.204358 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mxjzn" event={"ID":"ec5904a5-44b7-4554-88f3-cf256f28d9f2","Type":"ContainerDied","Data":"4ba017b5d991c763c61b1fc5e2a61630dbd456856f6fd0777811416b9e4d3305"} Nov 21 14:35:18 crc kubenswrapper[4774]: I1121 14:35:18.209254 4774 generic.go:334] "Generic (PLEG): container finished" podID="1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" containerID="9971b92caea1295c26f259cd2d1e780a7f2c8e372b008cc124a301493112d8c7" exitCode=0 Nov 21 14:35:18 crc kubenswrapper[4774]: I1121 14:35:18.209320 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g424r" event={"ID":"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2","Type":"ContainerDied","Data":"9971b92caea1295c26f259cd2d1e780a7f2c8e372b008cc124a301493112d8c7"} Nov 21 14:35:18 crc kubenswrapper[4774]: I1121 14:35:18.211907 4774 generic.go:334] "Generic (PLEG): container finished" podID="ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" containerID="e826b4da4328ffc6fb5bb37b86fc38f8b8bbc76dcc3623cc77c4ae3da47708a6" exitCode=0 Nov 21 14:35:18 crc kubenswrapper[4774]: I1121 14:35:18.211933 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jx27p" event={"ID":"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d","Type":"ContainerDied","Data":"e826b4da4328ffc6fb5bb37b86fc38f8b8bbc76dcc3623cc77c4ae3da47708a6"} Nov 21 14:35:18 crc kubenswrapper[4774]: I1121 14:35:18.211952 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jx27p" event={"ID":"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d","Type":"ContainerStarted","Data":"186ffb4563bcdb61d3a248fd73237681cfe774b66047c2ad0c54ac89d71c9eed"} Nov 21 14:35:19 crc kubenswrapper[4774]: I1121 14:35:19.233161 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g424r" event={"ID":"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2","Type":"ContainerStarted","Data":"673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04"} Nov 21 14:35:19 crc kubenswrapper[4774]: I1121 
Nov 21 14:35:20 crc kubenswrapper[4774]: I1121 14:35:20.245217 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mxjzn" event={"ID":"ec5904a5-44b7-4554-88f3-cf256f28d9f2","Type":"ContainerStarted","Data":"a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e"}
Nov 21 14:35:20 crc kubenswrapper[4774]: I1121 14:35:20.248892 4774 generic.go:334] "Generic (PLEG): container finished" podID="ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" containerID="363c41cda0eef28faa1a47b557005ce035aebc76dc7c581c334a16a94afb5353" exitCode=0
Nov 21 14:35:20 crc kubenswrapper[4774]: I1121 14:35:20.249037 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jx27p" event={"ID":"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d","Type":"ContainerDied","Data":"363c41cda0eef28faa1a47b557005ce035aebc76dc7c581c334a16a94afb5353"}
Nov 21 14:35:20 crc kubenswrapper[4774]: I1121 14:35:20.275765 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mxjzn" podStartSLOduration=3.113786201 podStartE2EDuration="7.275741086s" podCreationTimestamp="2025-11-21 14:35:13 +0000 UTC" firstStartedPulling="2025-11-21 14:35:15.176598824 +0000 UTC m=+1905.828798093" lastFinishedPulling="2025-11-21 14:35:19.338553719 +0000 UTC m=+1909.990752978" observedRunningTime="2025-11-21 14:35:20.27272188 +0000 UTC m=+1910.924921149" watchObservedRunningTime="2025-11-21 14:35:20.275741086 +0000 UTC m=+1910.927940345"
Nov 21 14:35:22 crc kubenswrapper[4774]: I1121 14:35:22.269167 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jx27p" event={"ID":"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d","Type":"ContainerStarted","Data":"9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c"}
Nov 21 14:35:22 crc kubenswrapper[4774]: I1121 14:35:22.304335 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jx27p" podStartSLOduration=4.338304402 podStartE2EDuration="7.304304014s" podCreationTimestamp="2025-11-21 14:35:15 +0000 UTC" firstStartedPulling="2025-11-21 14:35:18.213790356 +0000 UTC m=+1908.865989625" lastFinishedPulling="2025-11-21 14:35:21.179789968 +0000 UTC m=+1911.831989237" observedRunningTime="2025-11-21 14:35:22.297103539 +0000 UTC m=+1912.949302808" watchObservedRunningTime="2025-11-21 14:35:22.304304014 +0000 UTC m=+1912.956503283"
Nov 21 14:35:23 crc kubenswrapper[4774]: I1121 14:35:23.737198 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mxjzn"
Nov 21 14:35:23 crc kubenswrapper[4774]: I1121 14:35:23.737314 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mxjzn"
Nov 21 14:35:23 crc kubenswrapper[4774]: I1121 14:35:23.804696 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mxjzn"
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:23 crc kubenswrapper[4774]: I1121 14:35:23.950542 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:23 crc kubenswrapper[4774]: I1121 14:35:23.950942 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:24 crc kubenswrapper[4774]: I1121 14:35:24.004139 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:24 crc kubenswrapper[4774]: I1121 14:35:24.350439 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:24 crc kubenswrapper[4774]: I1121 14:35:24.363270 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.367437 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.368079 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.394582 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mxjzn"] Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.395031 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mxjzn" podUID="ec5904a5-44b7-4554-88f3-cf256f28d9f2" containerName="registry-server" containerID="cri-o://a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e" gracePeriod=2 Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.461410 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.588529 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g424r"] Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.810666 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.938091 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7svn\" (UniqueName: \"kubernetes.io/projected/ec5904a5-44b7-4554-88f3-cf256f28d9f2-kube-api-access-q7svn\") pod \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\" (UID: \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\") " Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.938258 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec5904a5-44b7-4554-88f3-cf256f28d9f2-catalog-content\") pod \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\" (UID: \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\") " Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.938470 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec5904a5-44b7-4554-88f3-cf256f28d9f2-utilities\") pod \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\" (UID: \"ec5904a5-44b7-4554-88f3-cf256f28d9f2\") " Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.940116 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec5904a5-44b7-4554-88f3-cf256f28d9f2-utilities" (OuterVolumeSpecName: "utilities") pod "ec5904a5-44b7-4554-88f3-cf256f28d9f2" (UID: "ec5904a5-44b7-4554-88f3-cf256f28d9f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.947165 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec5904a5-44b7-4554-88f3-cf256f28d9f2-kube-api-access-q7svn" (OuterVolumeSpecName: "kube-api-access-q7svn") pod "ec5904a5-44b7-4554-88f3-cf256f28d9f2" (UID: "ec5904a5-44b7-4554-88f3-cf256f28d9f2"). InnerVolumeSpecName "kube-api-access-q7svn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:35:26 crc kubenswrapper[4774]: I1121 14:35:26.981319 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec5904a5-44b7-4554-88f3-cf256f28d9f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ec5904a5-44b7-4554-88f3-cf256f28d9f2" (UID: "ec5904a5-44b7-4554-88f3-cf256f28d9f2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.040488 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec5904a5-44b7-4554-88f3-cf256f28d9f2-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.040561 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7svn\" (UniqueName: \"kubernetes.io/projected/ec5904a5-44b7-4554-88f3-cf256f28d9f2-kube-api-access-q7svn\") on node \"crc\" DevicePath \"\"" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.040585 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec5904a5-44b7-4554-88f3-cf256f28d9f2-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.331191 4774 generic.go:334] "Generic (PLEG): container finished" podID="ec5904a5-44b7-4554-88f3-cf256f28d9f2" containerID="a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e" exitCode=0 Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.331383 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mxjzn" event={"ID":"ec5904a5-44b7-4554-88f3-cf256f28d9f2","Type":"ContainerDied","Data":"a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e"} Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.331441 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mxjzn" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.331470 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mxjzn" event={"ID":"ec5904a5-44b7-4554-88f3-cf256f28d9f2","Type":"ContainerDied","Data":"bd8ebb17194e1273c05b59ca118220b44596571fa1eca97d2ff57254294bd578"} Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.331508 4774 scope.go:117] "RemoveContainer" containerID="a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.331663 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-g424r" podUID="1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" containerName="registry-server" containerID="cri-o://673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04" gracePeriod=2 Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.385504 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mxjzn"] Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.388592 4774 scope.go:117] "RemoveContainer" containerID="4ba017b5d991c763c61b1fc5e2a61630dbd456856f6fd0777811416b9e4d3305" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.391510 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mxjzn"] Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.430938 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.504316 4774 scope.go:117] "RemoveContainer" containerID="4504454e2a192396005f41eb231bfa981fbd89bc1cc2c4fe7aeb8919950880f4" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.529394 4774 scope.go:117] "RemoveContainer" 
containerID="a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e" Nov 21 14:35:27 crc kubenswrapper[4774]: E1121 14:35:27.530901 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e\": container with ID starting with a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e not found: ID does not exist" containerID="a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.530944 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e"} err="failed to get container status \"a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e\": rpc error: code = NotFound desc = could not find container \"a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e\": container with ID starting with a7dffef0b25d2b0a9c3014c0f6a4953ff18f11dd4ade1f1c42daba430328281e not found: ID does not exist" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.530973 4774 scope.go:117] "RemoveContainer" containerID="4ba017b5d991c763c61b1fc5e2a61630dbd456856f6fd0777811416b9e4d3305" Nov 21 14:35:27 crc kubenswrapper[4774]: E1121 14:35:27.531387 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ba017b5d991c763c61b1fc5e2a61630dbd456856f6fd0777811416b9e4d3305\": container with ID starting with 4ba017b5d991c763c61b1fc5e2a61630dbd456856f6fd0777811416b9e4d3305 not found: ID does not exist" containerID="4ba017b5d991c763c61b1fc5e2a61630dbd456856f6fd0777811416b9e4d3305" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.531412 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ba017b5d991c763c61b1fc5e2a61630dbd456856f6fd0777811416b9e4d3305"} err="failed to get container status \"4ba017b5d991c763c61b1fc5e2a61630dbd456856f6fd0777811416b9e4d3305\": rpc error: code = NotFound desc = could not find container \"4ba017b5d991c763c61b1fc5e2a61630dbd456856f6fd0777811416b9e4d3305\": container with ID starting with 4ba017b5d991c763c61b1fc5e2a61630dbd456856f6fd0777811416b9e4d3305 not found: ID does not exist" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.531427 4774 scope.go:117] "RemoveContainer" containerID="4504454e2a192396005f41eb231bfa981fbd89bc1cc2c4fe7aeb8919950880f4" Nov 21 14:35:27 crc kubenswrapper[4774]: E1121 14:35:27.531913 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4504454e2a192396005f41eb231bfa981fbd89bc1cc2c4fe7aeb8919950880f4\": container with ID starting with 4504454e2a192396005f41eb231bfa981fbd89bc1cc2c4fe7aeb8919950880f4 not found: ID does not exist" containerID="4504454e2a192396005f41eb231bfa981fbd89bc1cc2c4fe7aeb8919950880f4" Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.531941 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4504454e2a192396005f41eb231bfa981fbd89bc1cc2c4fe7aeb8919950880f4"} err="failed to get container status \"4504454e2a192396005f41eb231bfa981fbd89bc1cc2c4fe7aeb8919950880f4\": rpc error: code = NotFound desc = could not find container \"4504454e2a192396005f41eb231bfa981fbd89bc1cc2c4fe7aeb8919950880f4\": container with ID starting with 
Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.794260 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g424r"
Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.957141 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-utilities\") pod \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\" (UID: \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\") "
Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.957531 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkggd\" (UniqueName: \"kubernetes.io/projected/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-kube-api-access-mkggd\") pod \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\" (UID: \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\") "
Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.957637 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-catalog-content\") pod \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\" (UID: \"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2\") "
Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.959482 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-utilities" (OuterVolumeSpecName: "utilities") pod "1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" (UID: "1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:35:27 crc kubenswrapper[4774]: I1121 14:35:27.967235 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-kube-api-access-mkggd" (OuterVolumeSpecName: "kube-api-access-mkggd") pod "1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" (UID: "1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2"). InnerVolumeSpecName "kube-api-access-mkggd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.028674 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" (UID: "1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.059720 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkggd\" (UniqueName: \"kubernetes.io/projected/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-kube-api-access-mkggd\") on node \"crc\" DevicePath \"\"" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.059775 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.059790 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.110894 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec5904a5-44b7-4554-88f3-cf256f28d9f2" path="/var/lib/kubelet/pods/ec5904a5-44b7-4554-88f3-cf256f28d9f2/volumes" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.351930 4774 generic.go:334] "Generic (PLEG): container finished" podID="1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" containerID="673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04" exitCode=0 Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.352092 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g424r" event={"ID":"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2","Type":"ContainerDied","Data":"673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04"} Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.352207 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g424r" event={"ID":"1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2","Type":"ContainerDied","Data":"8a0f5b17445037d3891bd3075f1e17de068f30501ceb201297c8c4c3c28fb75b"} Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.352248 4774 scope.go:117] "RemoveContainer" containerID="673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.352359 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-g424r" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.388518 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-g424r"] Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.390528 4774 scope.go:117] "RemoveContainer" containerID="9971b92caea1295c26f259cd2d1e780a7f2c8e372b008cc124a301493112d8c7" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.397004 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-g424r"] Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.414333 4774 scope.go:117] "RemoveContainer" containerID="245aa1222f27ff25a09dd97527370778f1d9df811abb8355007068c3e6322176" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.449186 4774 scope.go:117] "RemoveContainer" containerID="673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04" Nov 21 14:35:28 crc kubenswrapper[4774]: E1121 14:35:28.449987 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04\": container with ID starting with 673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04 not found: ID does not exist" containerID="673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.450039 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04"} err="failed to get container status \"673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04\": rpc error: code = NotFound desc = could not find container \"673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04\": container with ID starting with 673a321ea885c24ff803bdb691c0658bd8e396e95fd61a1ce1b930a2b54b3d04 not found: ID does not exist" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.450076 4774 scope.go:117] "RemoveContainer" containerID="9971b92caea1295c26f259cd2d1e780a7f2c8e372b008cc124a301493112d8c7" Nov 21 14:35:28 crc kubenswrapper[4774]: E1121 14:35:28.450539 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9971b92caea1295c26f259cd2d1e780a7f2c8e372b008cc124a301493112d8c7\": container with ID starting with 9971b92caea1295c26f259cd2d1e780a7f2c8e372b008cc124a301493112d8c7 not found: ID does not exist" containerID="9971b92caea1295c26f259cd2d1e780a7f2c8e372b008cc124a301493112d8c7" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.450572 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9971b92caea1295c26f259cd2d1e780a7f2c8e372b008cc124a301493112d8c7"} err="failed to get container status \"9971b92caea1295c26f259cd2d1e780a7f2c8e372b008cc124a301493112d8c7\": rpc error: code = NotFound desc = could not find container \"9971b92caea1295c26f259cd2d1e780a7f2c8e372b008cc124a301493112d8c7\": container with ID starting with 9971b92caea1295c26f259cd2d1e780a7f2c8e372b008cc124a301493112d8c7 not found: ID does not exist" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.450592 4774 scope.go:117] "RemoveContainer" containerID="245aa1222f27ff25a09dd97527370778f1d9df811abb8355007068c3e6322176" Nov 21 14:35:28 crc kubenswrapper[4774]: E1121 14:35:28.450936 4774 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"245aa1222f27ff25a09dd97527370778f1d9df811abb8355007068c3e6322176\": container with ID starting with 245aa1222f27ff25a09dd97527370778f1d9df811abb8355007068c3e6322176 not found: ID does not exist" containerID="245aa1222f27ff25a09dd97527370778f1d9df811abb8355007068c3e6322176" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.450973 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"245aa1222f27ff25a09dd97527370778f1d9df811abb8355007068c3e6322176"} err="failed to get container status \"245aa1222f27ff25a09dd97527370778f1d9df811abb8355007068c3e6322176\": rpc error: code = NotFound desc = could not find container \"245aa1222f27ff25a09dd97527370778f1d9df811abb8355007068c3e6322176\": container with ID starting with 245aa1222f27ff25a09dd97527370778f1d9df811abb8355007068c3e6322176 not found: ID does not exist" Nov 21 14:35:28 crc kubenswrapper[4774]: I1121 14:35:28.786382 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jx27p"] Nov 21 14:35:29 crc kubenswrapper[4774]: I1121 14:35:29.374270 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jx27p" podUID="ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" containerName="registry-server" containerID="cri-o://9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c" gracePeriod=2 Nov 21 14:35:29 crc kubenswrapper[4774]: I1121 14:35:29.784945 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:29 crc kubenswrapper[4774]: I1121 14:35:29.894898 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prmg9\" (UniqueName: \"kubernetes.io/projected/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-kube-api-access-prmg9\") pod \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\" (UID: \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\") " Nov 21 14:35:29 crc kubenswrapper[4774]: I1121 14:35:29.895015 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-catalog-content\") pod \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\" (UID: \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\") " Nov 21 14:35:29 crc kubenswrapper[4774]: I1121 14:35:29.895110 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-utilities\") pod \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\" (UID: \"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d\") " Nov 21 14:35:29 crc kubenswrapper[4774]: I1121 14:35:29.896348 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-utilities" (OuterVolumeSpecName: "utilities") pod "ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" (UID: "ec5276fb-2eed-4d5f-a722-dcbc19d6c16d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:35:29 crc kubenswrapper[4774]: I1121 14:35:29.903344 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-kube-api-access-prmg9" (OuterVolumeSpecName: "kube-api-access-prmg9") pod "ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" (UID: "ec5276fb-2eed-4d5f-a722-dcbc19d6c16d"). 
InnerVolumeSpecName "kube-api-access-prmg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:35:29 crc kubenswrapper[4774]: I1121 14:35:29.954134 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" (UID: "ec5276fb-2eed-4d5f-a722-dcbc19d6c16d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:35:29 crc kubenswrapper[4774]: I1121 14:35:29.996740 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prmg9\" (UniqueName: \"kubernetes.io/projected/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-kube-api-access-prmg9\") on node \"crc\" DevicePath \"\"" Nov 21 14:35:29 crc kubenswrapper[4774]: I1121 14:35:29.996793 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:35:29 crc kubenswrapper[4774]: I1121 14:35:29.996815 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.113099 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" path="/var/lib/kubelet/pods/1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2/volumes" Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.387306 4774 generic.go:334] "Generic (PLEG): container finished" podID="ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" containerID="9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c" exitCode=0 Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.387356 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jx27p" event={"ID":"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d","Type":"ContainerDied","Data":"9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c"} Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.387394 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jx27p" event={"ID":"ec5276fb-2eed-4d5f-a722-dcbc19d6c16d","Type":"ContainerDied","Data":"186ffb4563bcdb61d3a248fd73237681cfe774b66047c2ad0c54ac89d71c9eed"} Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.387403 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jx27p" Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.387415 4774 scope.go:117] "RemoveContainer" containerID="9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c" Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.415194 4774 scope.go:117] "RemoveContainer" containerID="363c41cda0eef28faa1a47b557005ce035aebc76dc7c581c334a16a94afb5353" Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.416265 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jx27p"] Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.423486 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jx27p"] Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.436434 4774 scope.go:117] "RemoveContainer" containerID="e826b4da4328ffc6fb5bb37b86fc38f8b8bbc76dcc3623cc77c4ae3da47708a6" Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.485777 4774 scope.go:117] "RemoveContainer" containerID="9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c" Nov 21 14:35:30 crc kubenswrapper[4774]: E1121 14:35:30.486501 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c\": container with ID starting with 9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c not found: ID does not exist" containerID="9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c" Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.486563 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c"} err="failed to get container status \"9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c\": rpc error: code = NotFound desc = could not find container \"9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c\": container with ID starting with 9e0d82f2ed20972f60aabfd2e598c89d331c4525c35f5a150b479c4254f1499c not found: ID does not exist" Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.486607 4774 scope.go:117] "RemoveContainer" containerID="363c41cda0eef28faa1a47b557005ce035aebc76dc7c581c334a16a94afb5353" Nov 21 14:35:30 crc kubenswrapper[4774]: E1121 14:35:30.487294 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"363c41cda0eef28faa1a47b557005ce035aebc76dc7c581c334a16a94afb5353\": container with ID starting with 363c41cda0eef28faa1a47b557005ce035aebc76dc7c581c334a16a94afb5353 not found: ID does not exist" containerID="363c41cda0eef28faa1a47b557005ce035aebc76dc7c581c334a16a94afb5353" Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.487318 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"363c41cda0eef28faa1a47b557005ce035aebc76dc7c581c334a16a94afb5353"} err="failed to get container status \"363c41cda0eef28faa1a47b557005ce035aebc76dc7c581c334a16a94afb5353\": rpc error: code = NotFound desc = could not find container \"363c41cda0eef28faa1a47b557005ce035aebc76dc7c581c334a16a94afb5353\": container with ID starting with 363c41cda0eef28faa1a47b557005ce035aebc76dc7c581c334a16a94afb5353 not found: ID does not exist" Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.487335 4774 scope.go:117] "RemoveContainer" 
containerID="e826b4da4328ffc6fb5bb37b86fc38f8b8bbc76dcc3623cc77c4ae3da47708a6" Nov 21 14:35:30 crc kubenswrapper[4774]: E1121 14:35:30.487676 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e826b4da4328ffc6fb5bb37b86fc38f8b8bbc76dcc3623cc77c4ae3da47708a6\": container with ID starting with e826b4da4328ffc6fb5bb37b86fc38f8b8bbc76dcc3623cc77c4ae3da47708a6 not found: ID does not exist" containerID="e826b4da4328ffc6fb5bb37b86fc38f8b8bbc76dcc3623cc77c4ae3da47708a6" Nov 21 14:35:30 crc kubenswrapper[4774]: I1121 14:35:30.487790 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e826b4da4328ffc6fb5bb37b86fc38f8b8bbc76dcc3623cc77c4ae3da47708a6"} err="failed to get container status \"e826b4da4328ffc6fb5bb37b86fc38f8b8bbc76dcc3623cc77c4ae3da47708a6\": rpc error: code = NotFound desc = could not find container \"e826b4da4328ffc6fb5bb37b86fc38f8b8bbc76dcc3623cc77c4ae3da47708a6\": container with ID starting with e826b4da4328ffc6fb5bb37b86fc38f8b8bbc76dcc3623cc77c4ae3da47708a6 not found: ID does not exist" Nov 21 14:35:32 crc kubenswrapper[4774]: I1121 14:35:32.106367 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" path="/var/lib/kubelet/pods/ec5276fb-2eed-4d5f-a722-dcbc19d6c16d/volumes" Nov 21 14:37:29 crc kubenswrapper[4774]: I1121 14:37:29.602992 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:37:29 crc kubenswrapper[4774]: I1121 14:37:29.603759 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:37:59 crc kubenswrapper[4774]: I1121 14:37:59.601211 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:37:59 crc kubenswrapper[4774]: I1121 14:37:59.601960 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.977279 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2hj6p"] Nov 21 14:38:00 crc kubenswrapper[4774]: E1121 14:38:00.977725 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" containerName="extract-utilities" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.977744 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" containerName="extract-utilities" Nov 21 14:38:00 crc kubenswrapper[4774]: E1121 14:38:00.977763 4774 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="ec5904a5-44b7-4554-88f3-cf256f28d9f2" containerName="extract-content" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.977772 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec5904a5-44b7-4554-88f3-cf256f28d9f2" containerName="extract-content" Nov 21 14:38:00 crc kubenswrapper[4774]: E1121 14:38:00.977788 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec5904a5-44b7-4554-88f3-cf256f28d9f2" containerName="registry-server" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.977796 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec5904a5-44b7-4554-88f3-cf256f28d9f2" containerName="registry-server" Nov 21 14:38:00 crc kubenswrapper[4774]: E1121 14:38:00.977836 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" containerName="registry-server" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.977846 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" containerName="registry-server" Nov 21 14:38:00 crc kubenswrapper[4774]: E1121 14:38:00.977865 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" containerName="registry-server" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.977873 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" containerName="registry-server" Nov 21 14:38:00 crc kubenswrapper[4774]: E1121 14:38:00.977891 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" containerName="extract-utilities" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.977899 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" containerName="extract-utilities" Nov 21 14:38:00 crc kubenswrapper[4774]: E1121 14:38:00.977908 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec5904a5-44b7-4554-88f3-cf256f28d9f2" containerName="extract-utilities" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.977916 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec5904a5-44b7-4554-88f3-cf256f28d9f2" containerName="extract-utilities" Nov 21 14:38:00 crc kubenswrapper[4774]: E1121 14:38:00.977933 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" containerName="extract-content" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.977941 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" containerName="extract-content" Nov 21 14:38:00 crc kubenswrapper[4774]: E1121 14:38:00.977962 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" containerName="extract-content" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.977970 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec5276fb-2eed-4d5f-a722-dcbc19d6c16d" containerName="extract-content" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.978152 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec5904a5-44b7-4554-88f3-cf256f28d9f2" containerName="registry-server" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.978175 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a0ad2bc-dd3f-41c4-b0a6-b28d382f48e2" containerName="registry-server" Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 
Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.979772 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2hj6p"
Nov 21 14:38:00 crc kubenswrapper[4774]: I1121 14:38:00.986329 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2hj6p"]
Nov 21 14:38:01 crc kubenswrapper[4774]: I1121 14:38:01.077415 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqrc7\" (UniqueName: \"kubernetes.io/projected/b4792d4e-f3e0-42f4-804e-d8826791208c-kube-api-access-rqrc7\") pod \"redhat-operators-2hj6p\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " pod="openshift-marketplace/redhat-operators-2hj6p"
Nov 21 14:38:01 crc kubenswrapper[4774]: I1121 14:38:01.077550 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4792d4e-f3e0-42f4-804e-d8826791208c-catalog-content\") pod \"redhat-operators-2hj6p\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " pod="openshift-marketplace/redhat-operators-2hj6p"
Nov 21 14:38:01 crc kubenswrapper[4774]: I1121 14:38:01.077684 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4792d4e-f3e0-42f4-804e-d8826791208c-utilities\") pod \"redhat-operators-2hj6p\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " pod="openshift-marketplace/redhat-operators-2hj6p"
Nov 21 14:38:01 crc kubenswrapper[4774]: I1121 14:38:01.179035 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqrc7\" (UniqueName: \"kubernetes.io/projected/b4792d4e-f3e0-42f4-804e-d8826791208c-kube-api-access-rqrc7\") pod \"redhat-operators-2hj6p\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " pod="openshift-marketplace/redhat-operators-2hj6p"
Nov 21 14:38:01 crc kubenswrapper[4774]: I1121 14:38:01.179345 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4792d4e-f3e0-42f4-804e-d8826791208c-catalog-content\") pod \"redhat-operators-2hj6p\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " pod="openshift-marketplace/redhat-operators-2hj6p"
Nov 21 14:38:01 crc kubenswrapper[4774]: I1121 14:38:01.179460 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4792d4e-f3e0-42f4-804e-d8826791208c-utilities\") pod \"redhat-operators-2hj6p\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " pod="openshift-marketplace/redhat-operators-2hj6p"
Nov 21 14:38:01 crc kubenswrapper[4774]: I1121 14:38:01.179975 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4792d4e-f3e0-42f4-804e-d8826791208c-catalog-content\") pod \"redhat-operators-2hj6p\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " pod="openshift-marketplace/redhat-operators-2hj6p"
Nov 21 14:38:01 crc kubenswrapper[4774]: I1121 14:38:01.180005 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4792d4e-f3e0-42f4-804e-d8826791208c-utilities\") pod \"redhat-operators-2hj6p\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " pod="openshift-marketplace/redhat-operators-2hj6p"
\"redhat-operators-2hj6p\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " pod="openshift-marketplace/redhat-operators-2hj6p" Nov 21 14:38:01 crc kubenswrapper[4774]: I1121 14:38:01.199251 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqrc7\" (UniqueName: \"kubernetes.io/projected/b4792d4e-f3e0-42f4-804e-d8826791208c-kube-api-access-rqrc7\") pod \"redhat-operators-2hj6p\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " pod="openshift-marketplace/redhat-operators-2hj6p" Nov 21 14:38:01 crc kubenswrapper[4774]: I1121 14:38:01.303301 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2hj6p" Nov 21 14:38:01 crc kubenswrapper[4774]: I1121 14:38:01.556140 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2hj6p"] Nov 21 14:38:01 crc kubenswrapper[4774]: I1121 14:38:01.831176 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hj6p" event={"ID":"b4792d4e-f3e0-42f4-804e-d8826791208c","Type":"ContainerStarted","Data":"25a95efa7d31b21141bbe2b461fa9de38efd8dba3fef28bbfaadd29524872316"} Nov 21 14:38:02 crc kubenswrapper[4774]: I1121 14:38:02.840204 4774 generic.go:334] "Generic (PLEG): container finished" podID="b4792d4e-f3e0-42f4-804e-d8826791208c" containerID="e10b1de4bd219d8ab3c8a356b2d6b1373007ac713542ebee50ead7a186c4213b" exitCode=0 Nov 21 14:38:02 crc kubenswrapper[4774]: I1121 14:38:02.840281 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hj6p" event={"ID":"b4792d4e-f3e0-42f4-804e-d8826791208c","Type":"ContainerDied","Data":"e10b1de4bd219d8ab3c8a356b2d6b1373007ac713542ebee50ead7a186c4213b"} Nov 21 14:38:05 crc kubenswrapper[4774]: I1121 14:38:05.864673 4774 generic.go:334] "Generic (PLEG): container finished" podID="b4792d4e-f3e0-42f4-804e-d8826791208c" containerID="daa43e08bfdb42e91c8313335eb4ebf8f77bf49ac7b671fdb6e44e9a8e50fcf3" exitCode=0 Nov 21 14:38:05 crc kubenswrapper[4774]: I1121 14:38:05.864761 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hj6p" event={"ID":"b4792d4e-f3e0-42f4-804e-d8826791208c","Type":"ContainerDied","Data":"daa43e08bfdb42e91c8313335eb4ebf8f77bf49ac7b671fdb6e44e9a8e50fcf3"} Nov 21 14:38:06 crc kubenswrapper[4774]: I1121 14:38:06.875441 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hj6p" event={"ID":"b4792d4e-f3e0-42f4-804e-d8826791208c","Type":"ContainerStarted","Data":"5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6"} Nov 21 14:38:06 crc kubenswrapper[4774]: I1121 14:38:06.897755 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2hj6p" podStartSLOduration=3.278206136 podStartE2EDuration="6.897729136s" podCreationTimestamp="2025-11-21 14:38:00 +0000 UTC" firstStartedPulling="2025-11-21 14:38:02.842852634 +0000 UTC m=+2073.495051893" lastFinishedPulling="2025-11-21 14:38:06.462375634 +0000 UTC m=+2077.114574893" observedRunningTime="2025-11-21 14:38:06.892188157 +0000 UTC m=+2077.544387426" watchObservedRunningTime="2025-11-21 14:38:06.897729136 +0000 UTC m=+2077.549928395" Nov 21 14:38:11 crc kubenswrapper[4774]: I1121 14:38:11.303678 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2hj6p" Nov 21 14:38:11 crc kubenswrapper[4774]: I1121 
Nov 21 14:38:12 crc kubenswrapper[4774]: I1121 14:38:12.341536 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2hj6p" podUID="b4792d4e-f3e0-42f4-804e-d8826791208c" containerName="registry-server" probeResult="failure" output=<
Nov 21 14:38:12 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s
Nov 21 14:38:12 crc kubenswrapper[4774]: >
Nov 21 14:38:21 crc kubenswrapper[4774]: I1121 14:38:21.366427 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2hj6p"
Nov 21 14:38:21 crc kubenswrapper[4774]: I1121 14:38:21.417402 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2hj6p"
Nov 21 14:38:21 crc kubenswrapper[4774]: I1121 14:38:21.611554 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2hj6p"]
Nov 21 14:38:23 crc kubenswrapper[4774]: I1121 14:38:23.033612 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2hj6p" podUID="b4792d4e-f3e0-42f4-804e-d8826791208c" containerName="registry-server" containerID="cri-o://5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6" gracePeriod=2
Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.005671 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2hj6p"
Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.047705 4774 generic.go:334] "Generic (PLEG): container finished" podID="b4792d4e-f3e0-42f4-804e-d8826791208c" containerID="5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6" exitCode=0
Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.047772 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hj6p" event={"ID":"b4792d4e-f3e0-42f4-804e-d8826791208c","Type":"ContainerDied","Data":"5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6"}
Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.047841 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2hj6p" event={"ID":"b4792d4e-f3e0-42f4-804e-d8826791208c","Type":"ContainerDied","Data":"25a95efa7d31b21141bbe2b461fa9de38efd8dba3fef28bbfaadd29524872316"}
Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.047871 4774 scope.go:117] "RemoveContainer" containerID="5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6"
Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.047869 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2hj6p"
Need to start a new one" pod="openshift-marketplace/redhat-operators-2hj6p" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.074199 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4792d4e-f3e0-42f4-804e-d8826791208c-utilities\") pod \"b4792d4e-f3e0-42f4-804e-d8826791208c\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.074302 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqrc7\" (UniqueName: \"kubernetes.io/projected/b4792d4e-f3e0-42f4-804e-d8826791208c-kube-api-access-rqrc7\") pod \"b4792d4e-f3e0-42f4-804e-d8826791208c\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.074550 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4792d4e-f3e0-42f4-804e-d8826791208c-catalog-content\") pod \"b4792d4e-f3e0-42f4-804e-d8826791208c\" (UID: \"b4792d4e-f3e0-42f4-804e-d8826791208c\") " Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.075717 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4792d4e-f3e0-42f4-804e-d8826791208c-utilities" (OuterVolumeSpecName: "utilities") pod "b4792d4e-f3e0-42f4-804e-d8826791208c" (UID: "b4792d4e-f3e0-42f4-804e-d8826791208c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.078433 4774 scope.go:117] "RemoveContainer" containerID="daa43e08bfdb42e91c8313335eb4ebf8f77bf49ac7b671fdb6e44e9a8e50fcf3" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.083742 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4792d4e-f3e0-42f4-804e-d8826791208c-kube-api-access-rqrc7" (OuterVolumeSpecName: "kube-api-access-rqrc7") pod "b4792d4e-f3e0-42f4-804e-d8826791208c" (UID: "b4792d4e-f3e0-42f4-804e-d8826791208c"). InnerVolumeSpecName "kube-api-access-rqrc7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.128896 4774 scope.go:117] "RemoveContainer" containerID="e10b1de4bd219d8ab3c8a356b2d6b1373007ac713542ebee50ead7a186c4213b" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.151548 4774 scope.go:117] "RemoveContainer" containerID="5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6" Nov 21 14:38:24 crc kubenswrapper[4774]: E1121 14:38:24.152384 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6\": container with ID starting with 5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6 not found: ID does not exist" containerID="5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.152475 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6"} err="failed to get container status \"5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6\": rpc error: code = NotFound desc = could not find container \"5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6\": container with ID starting with 5203ea120160708c5cd20ffe013384cd36b28f69c3c6e7866e7c3e3ce76325d6 not found: ID does not exist" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.152532 4774 scope.go:117] "RemoveContainer" containerID="daa43e08bfdb42e91c8313335eb4ebf8f77bf49ac7b671fdb6e44e9a8e50fcf3" Nov 21 14:38:24 crc kubenswrapper[4774]: E1121 14:38:24.153553 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"daa43e08bfdb42e91c8313335eb4ebf8f77bf49ac7b671fdb6e44e9a8e50fcf3\": container with ID starting with daa43e08bfdb42e91c8313335eb4ebf8f77bf49ac7b671fdb6e44e9a8e50fcf3 not found: ID does not exist" containerID="daa43e08bfdb42e91c8313335eb4ebf8f77bf49ac7b671fdb6e44e9a8e50fcf3" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.153622 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daa43e08bfdb42e91c8313335eb4ebf8f77bf49ac7b671fdb6e44e9a8e50fcf3"} err="failed to get container status \"daa43e08bfdb42e91c8313335eb4ebf8f77bf49ac7b671fdb6e44e9a8e50fcf3\": rpc error: code = NotFound desc = could not find container \"daa43e08bfdb42e91c8313335eb4ebf8f77bf49ac7b671fdb6e44e9a8e50fcf3\": container with ID starting with daa43e08bfdb42e91c8313335eb4ebf8f77bf49ac7b671fdb6e44e9a8e50fcf3 not found: ID does not exist" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.153667 4774 scope.go:117] "RemoveContainer" containerID="e10b1de4bd219d8ab3c8a356b2d6b1373007ac713542ebee50ead7a186c4213b" Nov 21 14:38:24 crc kubenswrapper[4774]: E1121 14:38:24.154518 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e10b1de4bd219d8ab3c8a356b2d6b1373007ac713542ebee50ead7a186c4213b\": container with ID starting with e10b1de4bd219d8ab3c8a356b2d6b1373007ac713542ebee50ead7a186c4213b not found: ID does not exist" containerID="e10b1de4bd219d8ab3c8a356b2d6b1373007ac713542ebee50ead7a186c4213b" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.154600 4774 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e10b1de4bd219d8ab3c8a356b2d6b1373007ac713542ebee50ead7a186c4213b"} err="failed to get container status \"e10b1de4bd219d8ab3c8a356b2d6b1373007ac713542ebee50ead7a186c4213b\": rpc error: code = NotFound desc = could not find container \"e10b1de4bd219d8ab3c8a356b2d6b1373007ac713542ebee50ead7a186c4213b\": container with ID starting with e10b1de4bd219d8ab3c8a356b2d6b1373007ac713542ebee50ead7a186c4213b not found: ID does not exist" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.177111 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4792d4e-f3e0-42f4-804e-d8826791208c-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.177193 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqrc7\" (UniqueName: \"kubernetes.io/projected/b4792d4e-f3e0-42f4-804e-d8826791208c-kube-api-access-rqrc7\") on node \"crc\" DevicePath \"\"" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.207762 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4792d4e-f3e0-42f4-804e-d8826791208c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4792d4e-f3e0-42f4-804e-d8826791208c" (UID: "b4792d4e-f3e0-42f4-804e-d8826791208c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.278665 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4792d4e-f3e0-42f4-804e-d8826791208c-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.406469 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2hj6p"] Nov 21 14:38:24 crc kubenswrapper[4774]: I1121 14:38:24.419098 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2hj6p"] Nov 21 14:38:26 crc kubenswrapper[4774]: I1121 14:38:26.111952 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4792d4e-f3e0-42f4-804e-d8826791208c" path="/var/lib/kubelet/pods/b4792d4e-f3e0-42f4-804e-d8826791208c/volumes" Nov 21 14:38:29 crc kubenswrapper[4774]: I1121 14:38:29.600790 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:38:29 crc kubenswrapper[4774]: I1121 14:38:29.600908 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:38:29 crc kubenswrapper[4774]: I1121 14:38:29.600961 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:38:29 crc kubenswrapper[4774]: I1121 14:38:29.601893 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"90a99d25b96ff645ef746e1ff6ba31b977e6ef17320d3d45927e8b989a18d9b8"} 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 14:38:29 crc kubenswrapper[4774]: I1121 14:38:29.601971 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://90a99d25b96ff645ef746e1ff6ba31b977e6ef17320d3d45927e8b989a18d9b8" gracePeriod=600 Nov 21 14:38:30 crc kubenswrapper[4774]: I1121 14:38:30.121614 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="90a99d25b96ff645ef746e1ff6ba31b977e6ef17320d3d45927e8b989a18d9b8" exitCode=0 Nov 21 14:38:30 crc kubenswrapper[4774]: I1121 14:38:30.121682 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"90a99d25b96ff645ef746e1ff6ba31b977e6ef17320d3d45927e8b989a18d9b8"} Nov 21 14:38:30 crc kubenswrapper[4774]: I1121 14:38:30.122600 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6"} Nov 21 14:38:30 crc kubenswrapper[4774]: I1121 14:38:30.122645 4774 scope.go:117] "RemoveContainer" containerID="a6764a1bd07238c2ac685c217d2c2e8585c2dc0fc85dc42611de1774090b834d" Nov 21 14:40:59 crc kubenswrapper[4774]: I1121 14:40:59.601195 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:40:59 crc kubenswrapper[4774]: I1121 14:40:59.601862 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:41:29 crc kubenswrapper[4774]: I1121 14:41:29.600788 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:41:29 crc kubenswrapper[4774]: I1121 14:41:29.601557 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:41:59 crc kubenswrapper[4774]: I1121 14:41:59.601334 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:41:59 crc kubenswrapper[4774]: 
I1121 14:41:59.602139 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:41:59 crc kubenswrapper[4774]: I1121 14:41:59.602203 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:41:59 crc kubenswrapper[4774]: I1121 14:41:59.602991 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 14:41:59 crc kubenswrapper[4774]: I1121 14:41:59.603076 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" gracePeriod=600 Nov 21 14:41:59 crc kubenswrapper[4774]: E1121 14:41:59.733431 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:42:00 crc kubenswrapper[4774]: I1121 14:42:00.155640 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" exitCode=0 Nov 21 14:42:00 crc kubenswrapper[4774]: I1121 14:42:00.155706 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6"} Nov 21 14:42:00 crc kubenswrapper[4774]: I1121 14:42:00.155761 4774 scope.go:117] "RemoveContainer" containerID="90a99d25b96ff645ef746e1ff6ba31b977e6ef17320d3d45927e8b989a18d9b8" Nov 21 14:42:00 crc kubenswrapper[4774]: I1121 14:42:00.156713 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:42:00 crc kubenswrapper[4774]: E1121 14:42:00.157073 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:42:13 crc kubenswrapper[4774]: I1121 14:42:13.093195 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:42:13 crc kubenswrapper[4774]: E1121 
14:42:13.094021 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:42:27 crc kubenswrapper[4774]: I1121 14:42:27.092815 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:42:27 crc kubenswrapper[4774]: E1121 14:42:27.093589 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:42:38 crc kubenswrapper[4774]: I1121 14:42:38.093141 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:42:38 crc kubenswrapper[4774]: E1121 14:42:38.093959 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:42:52 crc kubenswrapper[4774]: I1121 14:42:52.094331 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:42:52 crc kubenswrapper[4774]: E1121 14:42:52.095320 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:43:05 crc kubenswrapper[4774]: I1121 14:43:05.094228 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:43:05 crc kubenswrapper[4774]: E1121 14:43:05.095343 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:43:16 crc kubenswrapper[4774]: I1121 14:43:16.094005 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:43:16 crc kubenswrapper[4774]: E1121 14:43:16.095571 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:43:27 crc kubenswrapper[4774]: I1121 14:43:27.093678 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:43:27 crc kubenswrapper[4774]: E1121 14:43:27.094783 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:43:40 crc kubenswrapper[4774]: I1121 14:43:40.106164 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:43:40 crc kubenswrapper[4774]: E1121 14:43:40.107305 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:43:55 crc kubenswrapper[4774]: I1121 14:43:55.093200 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:43:55 crc kubenswrapper[4774]: E1121 14:43:55.094350 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:44:06 crc kubenswrapper[4774]: I1121 14:44:06.093880 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:44:06 crc kubenswrapper[4774]: E1121 14:44:06.095299 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:44:20 crc kubenswrapper[4774]: I1121 14:44:20.099528 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:44:20 crc kubenswrapper[4774]: E1121 14:44:20.100582 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:44:33 crc kubenswrapper[4774]: I1121 14:44:33.094512 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:44:33 crc kubenswrapper[4774]: E1121 14:44:33.096095 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:44:45 crc kubenswrapper[4774]: I1121 14:44:45.093298 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:44:45 crc kubenswrapper[4774]: E1121 14:44:45.094223 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:44:57 crc kubenswrapper[4774]: I1121 14:44:57.093692 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:44:57 crc kubenswrapper[4774]: E1121 14:44:57.097930 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.155024 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd"] Nov 21 14:45:00 crc kubenswrapper[4774]: E1121 14:45:00.156027 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4792d4e-f3e0-42f4-804e-d8826791208c" containerName="extract-content" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.156050 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4792d4e-f3e0-42f4-804e-d8826791208c" containerName="extract-content" Nov 21 14:45:00 crc kubenswrapper[4774]: E1121 14:45:00.156080 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4792d4e-f3e0-42f4-804e-d8826791208c" containerName="extract-utilities" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.156089 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4792d4e-f3e0-42f4-804e-d8826791208c" containerName="extract-utilities" Nov 21 14:45:00 crc kubenswrapper[4774]: E1121 14:45:00.156110 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4792d4e-f3e0-42f4-804e-d8826791208c" containerName="registry-server" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.156118 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4792d4e-f3e0-42f4-804e-d8826791208c" containerName="registry-server" Nov 21 14:45:00 crc 
kubenswrapper[4774]: I1121 14:45:00.156285 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4792d4e-f3e0-42f4-804e-d8826791208c" containerName="registry-server" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.157275 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.163610 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.163610 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.171004 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd"] Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.209532 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99841b31-985c-4ca5-bbb7-443fab72f9d8-config-volume\") pod \"collect-profiles-29395605-qrbtd\" (UID: \"99841b31-985c-4ca5-bbb7-443fab72f9d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.209749 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhxwh\" (UniqueName: \"kubernetes.io/projected/99841b31-985c-4ca5-bbb7-443fab72f9d8-kube-api-access-xhxwh\") pod \"collect-profiles-29395605-qrbtd\" (UID: \"99841b31-985c-4ca5-bbb7-443fab72f9d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.209866 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99841b31-985c-4ca5-bbb7-443fab72f9d8-secret-volume\") pod \"collect-profiles-29395605-qrbtd\" (UID: \"99841b31-985c-4ca5-bbb7-443fab72f9d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.311489 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99841b31-985c-4ca5-bbb7-443fab72f9d8-config-volume\") pod \"collect-profiles-29395605-qrbtd\" (UID: \"99841b31-985c-4ca5-bbb7-443fab72f9d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.311618 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhxwh\" (UniqueName: \"kubernetes.io/projected/99841b31-985c-4ca5-bbb7-443fab72f9d8-kube-api-access-xhxwh\") pod \"collect-profiles-29395605-qrbtd\" (UID: \"99841b31-985c-4ca5-bbb7-443fab72f9d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.311644 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99841b31-985c-4ca5-bbb7-443fab72f9d8-secret-volume\") pod \"collect-profiles-29395605-qrbtd\" (UID: \"99841b31-985c-4ca5-bbb7-443fab72f9d8\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.312561 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99841b31-985c-4ca5-bbb7-443fab72f9d8-config-volume\") pod \"collect-profiles-29395605-qrbtd\" (UID: \"99841b31-985c-4ca5-bbb7-443fab72f9d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.318808 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99841b31-985c-4ca5-bbb7-443fab72f9d8-secret-volume\") pod \"collect-profiles-29395605-qrbtd\" (UID: \"99841b31-985c-4ca5-bbb7-443fab72f9d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.335909 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhxwh\" (UniqueName: \"kubernetes.io/projected/99841b31-985c-4ca5-bbb7-443fab72f9d8-kube-api-access-xhxwh\") pod \"collect-profiles-29395605-qrbtd\" (UID: \"99841b31-985c-4ca5-bbb7-443fab72f9d8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.481874 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.726777 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd"] Nov 21 14:45:00 crc kubenswrapper[4774]: I1121 14:45:00.878908 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" event={"ID":"99841b31-985c-4ca5-bbb7-443fab72f9d8","Type":"ContainerStarted","Data":"bca97e7074ac89cd862bf72c619c54d62b43f2139789cc67043b5b8601d0f0f3"} Nov 21 14:45:01 crc kubenswrapper[4774]: I1121 14:45:01.888008 4774 generic.go:334] "Generic (PLEG): container finished" podID="99841b31-985c-4ca5-bbb7-443fab72f9d8" containerID="bd52be5a342ffdb29dc3e5a9611d5df8f96b916802c0e6af988cdba470627421" exitCode=0 Nov 21 14:45:01 crc kubenswrapper[4774]: I1121 14:45:01.888081 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" event={"ID":"99841b31-985c-4ca5-bbb7-443fab72f9d8","Type":"ContainerDied","Data":"bd52be5a342ffdb29dc3e5a9611d5df8f96b916802c0e6af988cdba470627421"} Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.177882 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.258203 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99841b31-985c-4ca5-bbb7-443fab72f9d8-secret-volume\") pod \"99841b31-985c-4ca5-bbb7-443fab72f9d8\" (UID: \"99841b31-985c-4ca5-bbb7-443fab72f9d8\") " Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.258269 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99841b31-985c-4ca5-bbb7-443fab72f9d8-config-volume\") pod \"99841b31-985c-4ca5-bbb7-443fab72f9d8\" (UID: \"99841b31-985c-4ca5-bbb7-443fab72f9d8\") " Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.258451 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhxwh\" (UniqueName: \"kubernetes.io/projected/99841b31-985c-4ca5-bbb7-443fab72f9d8-kube-api-access-xhxwh\") pod \"99841b31-985c-4ca5-bbb7-443fab72f9d8\" (UID: \"99841b31-985c-4ca5-bbb7-443fab72f9d8\") " Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.259114 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99841b31-985c-4ca5-bbb7-443fab72f9d8-config-volume" (OuterVolumeSpecName: "config-volume") pod "99841b31-985c-4ca5-bbb7-443fab72f9d8" (UID: "99841b31-985c-4ca5-bbb7-443fab72f9d8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.259948 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/99841b31-985c-4ca5-bbb7-443fab72f9d8-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.265135 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99841b31-985c-4ca5-bbb7-443fab72f9d8-kube-api-access-xhxwh" (OuterVolumeSpecName: "kube-api-access-xhxwh") pod "99841b31-985c-4ca5-bbb7-443fab72f9d8" (UID: "99841b31-985c-4ca5-bbb7-443fab72f9d8"). InnerVolumeSpecName "kube-api-access-xhxwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.276740 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99841b31-985c-4ca5-bbb7-443fab72f9d8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "99841b31-985c-4ca5-bbb7-443fab72f9d8" (UID: "99841b31-985c-4ca5-bbb7-443fab72f9d8"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.361766 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhxwh\" (UniqueName: \"kubernetes.io/projected/99841b31-985c-4ca5-bbb7-443fab72f9d8-kube-api-access-xhxwh\") on node \"crc\" DevicePath \"\"" Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.361809 4774 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/99841b31-985c-4ca5-bbb7-443fab72f9d8-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.904573 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" event={"ID":"99841b31-985c-4ca5-bbb7-443fab72f9d8","Type":"ContainerDied","Data":"bca97e7074ac89cd862bf72c619c54d62b43f2139789cc67043b5b8601d0f0f3"} Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.904619 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bca97e7074ac89cd862bf72c619c54d62b43f2139789cc67043b5b8601d0f0f3" Nov 21 14:45:03 crc kubenswrapper[4774]: I1121 14:45:03.904626 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd" Nov 21 14:45:04 crc kubenswrapper[4774]: I1121 14:45:04.263744 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"] Nov 21 14:45:04 crc kubenswrapper[4774]: I1121 14:45:04.271542 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395560-7hdr9"] Nov 21 14:45:06 crc kubenswrapper[4774]: I1121 14:45:06.109855 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa95d68b-3894-42cf-9af0-18b2575250c4" path="/var/lib/kubelet/pods/aa95d68b-3894-42cf-9af0-18b2575250c4/volumes" Nov 21 14:45:12 crc kubenswrapper[4774]: I1121 14:45:12.093940 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:45:12 crc kubenswrapper[4774]: E1121 14:45:12.094779 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:45:23 crc kubenswrapper[4774]: I1121 14:45:23.093266 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:45:23 crc kubenswrapper[4774]: E1121 14:45:23.094439 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:45:37 crc kubenswrapper[4774]: I1121 14:45:37.094740 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:45:37 
crc kubenswrapper[4774]: E1121 14:45:37.095927 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:45:38 crc kubenswrapper[4774]: I1121 14:45:38.536510 4774 scope.go:117] "RemoveContainer" containerID="3c9d0d6839eff2aad41b795c949bb392c5cf7779ab4b8bcbb72507cbd56b42e3" Nov 21 14:45:49 crc kubenswrapper[4774]: I1121 14:45:49.096126 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:45:49 crc kubenswrapper[4774]: E1121 14:45:49.097294 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:46:01 crc kubenswrapper[4774]: I1121 14:46:01.093507 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:46:01 crc kubenswrapper[4774]: E1121 14:46:01.094570 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:46:14 crc kubenswrapper[4774]: I1121 14:46:14.093566 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:46:14 crc kubenswrapper[4774]: E1121 14:46:14.094663 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:46:28 crc kubenswrapper[4774]: I1121 14:46:28.093179 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:46:28 crc kubenswrapper[4774]: E1121 14:46:28.094016 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:46:35 crc kubenswrapper[4774]: I1121 14:46:35.876972 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5smvv"] Nov 21 14:46:35 crc kubenswrapper[4774]: 
E1121 14:46:35.878216 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99841b31-985c-4ca5-bbb7-443fab72f9d8" containerName="collect-profiles" Nov 21 14:46:35 crc kubenswrapper[4774]: I1121 14:46:35.878234 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="99841b31-985c-4ca5-bbb7-443fab72f9d8" containerName="collect-profiles" Nov 21 14:46:35 crc kubenswrapper[4774]: I1121 14:46:35.878484 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="99841b31-985c-4ca5-bbb7-443fab72f9d8" containerName="collect-profiles" Nov 21 14:46:35 crc kubenswrapper[4774]: I1121 14:46:35.879747 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:35 crc kubenswrapper[4774]: I1121 14:46:35.895264 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5smvv"] Nov 21 14:46:35 crc kubenswrapper[4774]: I1121 14:46:35.979341 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67f5d7eb-2806-413f-af7c-12cd9946c0d9-utilities\") pod \"community-operators-5smvv\" (UID: \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\") " pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:35 crc kubenswrapper[4774]: I1121 14:46:35.979425 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2r9r\" (UniqueName: \"kubernetes.io/projected/67f5d7eb-2806-413f-af7c-12cd9946c0d9-kube-api-access-n2r9r\") pod \"community-operators-5smvv\" (UID: \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\") " pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:35 crc kubenswrapper[4774]: I1121 14:46:35.979484 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67f5d7eb-2806-413f-af7c-12cd9946c0d9-catalog-content\") pod \"community-operators-5smvv\" (UID: \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\") " pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:36 crc kubenswrapper[4774]: I1121 14:46:36.080764 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67f5d7eb-2806-413f-af7c-12cd9946c0d9-catalog-content\") pod \"community-operators-5smvv\" (UID: \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\") " pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:36 crc kubenswrapper[4774]: I1121 14:46:36.080905 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67f5d7eb-2806-413f-af7c-12cd9946c0d9-utilities\") pod \"community-operators-5smvv\" (UID: \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\") " pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:36 crc kubenswrapper[4774]: I1121 14:46:36.080936 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2r9r\" (UniqueName: \"kubernetes.io/projected/67f5d7eb-2806-413f-af7c-12cd9946c0d9-kube-api-access-n2r9r\") pod \"community-operators-5smvv\" (UID: \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\") " pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:36 crc kubenswrapper[4774]: I1121 14:46:36.081465 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/67f5d7eb-2806-413f-af7c-12cd9946c0d9-catalog-content\") pod \"community-operators-5smvv\" (UID: \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\") " pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:36 crc kubenswrapper[4774]: I1121 14:46:36.081742 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67f5d7eb-2806-413f-af7c-12cd9946c0d9-utilities\") pod \"community-operators-5smvv\" (UID: \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\") " pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:36 crc kubenswrapper[4774]: I1121 14:46:36.116410 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2r9r\" (UniqueName: \"kubernetes.io/projected/67f5d7eb-2806-413f-af7c-12cd9946c0d9-kube-api-access-n2r9r\") pod \"community-operators-5smvv\" (UID: \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\") " pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:36 crc kubenswrapper[4774]: I1121 14:46:36.209962 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:36 crc kubenswrapper[4774]: I1121 14:46:36.793050 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5smvv"] Nov 21 14:46:36 crc kubenswrapper[4774]: I1121 14:46:36.853543 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5smvv" event={"ID":"67f5d7eb-2806-413f-af7c-12cd9946c0d9","Type":"ContainerStarted","Data":"7c809bbd86dcbf4b7ec7a723f9756e1275b297bd147c181cff3d9afe42335bf7"} Nov 21 14:46:37 crc kubenswrapper[4774]: I1121 14:46:37.864339 4774 generic.go:334] "Generic (PLEG): container finished" podID="67f5d7eb-2806-413f-af7c-12cd9946c0d9" containerID="6816c0474ce31ede75f984fa719ee01b65779397e0fb431697aa9467b079ead9" exitCode=0 Nov 21 14:46:37 crc kubenswrapper[4774]: I1121 14:46:37.864409 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5smvv" event={"ID":"67f5d7eb-2806-413f-af7c-12cd9946c0d9","Type":"ContainerDied","Data":"6816c0474ce31ede75f984fa719ee01b65779397e0fb431697aa9467b079ead9"} Nov 21 14:46:37 crc kubenswrapper[4774]: I1121 14:46:37.867064 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 14:46:39 crc kubenswrapper[4774]: I1121 14:46:39.093346 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:46:39 crc kubenswrapper[4774]: E1121 14:46:39.094235 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:46:40 crc kubenswrapper[4774]: I1121 14:46:40.892751 4774 generic.go:334] "Generic (PLEG): container finished" podID="67f5d7eb-2806-413f-af7c-12cd9946c0d9" containerID="a574a4b671f8aad51a93ec6d96fe9699d7121f6c7d99fb9ce43475da1b8c87b0" exitCode=0 Nov 21 14:46:40 crc kubenswrapper[4774]: I1121 14:46:40.892872 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5smvv" 
event={"ID":"67f5d7eb-2806-413f-af7c-12cd9946c0d9","Type":"ContainerDied","Data":"a574a4b671f8aad51a93ec6d96fe9699d7121f6c7d99fb9ce43475da1b8c87b0"} Nov 21 14:46:41 crc kubenswrapper[4774]: I1121 14:46:41.955631 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gzq4q"] Nov 21 14:46:41 crc kubenswrapper[4774]: I1121 14:46:41.958988 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:41 crc kubenswrapper[4774]: I1121 14:46:41.973319 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gzq4q"] Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.088252 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2e57acc-f4ed-4ead-9811-2e606bbde022-catalog-content\") pod \"redhat-marketplace-gzq4q\" (UID: \"c2e57acc-f4ed-4ead-9811-2e606bbde022\") " pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.088438 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2e57acc-f4ed-4ead-9811-2e606bbde022-utilities\") pod \"redhat-marketplace-gzq4q\" (UID: \"c2e57acc-f4ed-4ead-9811-2e606bbde022\") " pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.088661 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6b5ms\" (UniqueName: \"kubernetes.io/projected/c2e57acc-f4ed-4ead-9811-2e606bbde022-kube-api-access-6b5ms\") pod \"redhat-marketplace-gzq4q\" (UID: \"c2e57acc-f4ed-4ead-9811-2e606bbde022\") " pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.190744 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6b5ms\" (UniqueName: \"kubernetes.io/projected/c2e57acc-f4ed-4ead-9811-2e606bbde022-kube-api-access-6b5ms\") pod \"redhat-marketplace-gzq4q\" (UID: \"c2e57acc-f4ed-4ead-9811-2e606bbde022\") " pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.191327 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2e57acc-f4ed-4ead-9811-2e606bbde022-catalog-content\") pod \"redhat-marketplace-gzq4q\" (UID: \"c2e57acc-f4ed-4ead-9811-2e606bbde022\") " pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.191368 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2e57acc-f4ed-4ead-9811-2e606bbde022-utilities\") pod \"redhat-marketplace-gzq4q\" (UID: \"c2e57acc-f4ed-4ead-9811-2e606bbde022\") " pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.192076 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2e57acc-f4ed-4ead-9811-2e606bbde022-catalog-content\") pod \"redhat-marketplace-gzq4q\" (UID: \"c2e57acc-f4ed-4ead-9811-2e606bbde022\") " pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 
14:46:42.192161 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2e57acc-f4ed-4ead-9811-2e606bbde022-utilities\") pod \"redhat-marketplace-gzq4q\" (UID: \"c2e57acc-f4ed-4ead-9811-2e606bbde022\") " pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.215988 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6b5ms\" (UniqueName: \"kubernetes.io/projected/c2e57acc-f4ed-4ead-9811-2e606bbde022-kube-api-access-6b5ms\") pod \"redhat-marketplace-gzq4q\" (UID: \"c2e57acc-f4ed-4ead-9811-2e606bbde022\") " pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.298509 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.834702 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gzq4q"] Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.932644 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5smvv" event={"ID":"67f5d7eb-2806-413f-af7c-12cd9946c0d9","Type":"ContainerStarted","Data":"5ebfc288d2ca82ef71cf610a1355e30c206574f0d49f2a46402bd571319d487a"} Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.934368 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzq4q" event={"ID":"c2e57acc-f4ed-4ead-9811-2e606bbde022","Type":"ContainerStarted","Data":"4790a140c99c1823daf0237cce60c060fd52343cd04068121785eb307b7a42e3"} Nov 21 14:46:42 crc kubenswrapper[4774]: I1121 14:46:42.953976 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5smvv" podStartSLOduration=3.772951457 podStartE2EDuration="7.953948894s" podCreationTimestamp="2025-11-21 14:46:35 +0000 UTC" firstStartedPulling="2025-11-21 14:46:37.866678175 +0000 UTC m=+2588.518877434" lastFinishedPulling="2025-11-21 14:46:42.047675602 +0000 UTC m=+2592.699874871" observedRunningTime="2025-11-21 14:46:42.950678102 +0000 UTC m=+2593.602877381" watchObservedRunningTime="2025-11-21 14:46:42.953948894 +0000 UTC m=+2593.606148143" Nov 21 14:46:43 crc kubenswrapper[4774]: I1121 14:46:43.952415 4774 generic.go:334] "Generic (PLEG): container finished" podID="c2e57acc-f4ed-4ead-9811-2e606bbde022" containerID="65bdd452e35f353c401f19d33d6c5c70cc566ebab68c13dec4647e24fe68bc06" exitCode=0 Nov 21 14:46:43 crc kubenswrapper[4774]: I1121 14:46:43.953843 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzq4q" event={"ID":"c2e57acc-f4ed-4ead-9811-2e606bbde022","Type":"ContainerDied","Data":"65bdd452e35f353c401f19d33d6c5c70cc566ebab68c13dec4647e24fe68bc06"} Nov 21 14:46:45 crc kubenswrapper[4774]: I1121 14:46:45.864137 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lhxgt"] Nov 21 14:46:45 crc kubenswrapper[4774]: I1121 14:46:45.866276 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:46:45 crc kubenswrapper[4774]: I1121 14:46:45.894055 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lhxgt"] Nov 21 14:46:45 crc kubenswrapper[4774]: I1121 14:46:45.964649 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5935916-b1bc-419c-b2de-21eeb777a88b-utilities\") pod \"certified-operators-lhxgt\" (UID: \"f5935916-b1bc-419c-b2de-21eeb777a88b\") " pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:46:45 crc kubenswrapper[4774]: I1121 14:46:45.965036 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2lk8\" (UniqueName: \"kubernetes.io/projected/f5935916-b1bc-419c-b2de-21eeb777a88b-kube-api-access-b2lk8\") pod \"certified-operators-lhxgt\" (UID: \"f5935916-b1bc-419c-b2de-21eeb777a88b\") " pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:46:45 crc kubenswrapper[4774]: I1121 14:46:45.965151 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5935916-b1bc-419c-b2de-21eeb777a88b-catalog-content\") pod \"certified-operators-lhxgt\" (UID: \"f5935916-b1bc-419c-b2de-21eeb777a88b\") " pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:46:45 crc kubenswrapper[4774]: I1121 14:46:45.979846 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzq4q" event={"ID":"c2e57acc-f4ed-4ead-9811-2e606bbde022","Type":"ContainerStarted","Data":"a12cf282dc443a16fe5330604db2074937c7f23b0bc7d9e3b0fb6a36f01bf85d"} Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.066330 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5935916-b1bc-419c-b2de-21eeb777a88b-catalog-content\") pod \"certified-operators-lhxgt\" (UID: \"f5935916-b1bc-419c-b2de-21eeb777a88b\") " pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.066429 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5935916-b1bc-419c-b2de-21eeb777a88b-utilities\") pod \"certified-operators-lhxgt\" (UID: \"f5935916-b1bc-419c-b2de-21eeb777a88b\") " pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.066478 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2lk8\" (UniqueName: \"kubernetes.io/projected/f5935916-b1bc-419c-b2de-21eeb777a88b-kube-api-access-b2lk8\") pod \"certified-operators-lhxgt\" (UID: \"f5935916-b1bc-419c-b2de-21eeb777a88b\") " pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.067055 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5935916-b1bc-419c-b2de-21eeb777a88b-catalog-content\") pod \"certified-operators-lhxgt\" (UID: \"f5935916-b1bc-419c-b2de-21eeb777a88b\") " pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.067396 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/f5935916-b1bc-419c-b2de-21eeb777a88b-utilities\") pod \"certified-operators-lhxgt\" (UID: \"f5935916-b1bc-419c-b2de-21eeb777a88b\") " pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.088768 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2lk8\" (UniqueName: \"kubernetes.io/projected/f5935916-b1bc-419c-b2de-21eeb777a88b-kube-api-access-b2lk8\") pod \"certified-operators-lhxgt\" (UID: \"f5935916-b1bc-419c-b2de-21eeb777a88b\") " pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.183233 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.211119 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.211201 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.282072 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.663112 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lhxgt"] Nov 21 14:46:46 crc kubenswrapper[4774]: W1121 14:46:46.680711 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5935916_b1bc_419c_b2de_21eeb777a88b.slice/crio-dfb4a225f663b39daef9267c8d2f950e59ece56f94bace756d67fce24f1e7f49 WatchSource:0}: Error finding container dfb4a225f663b39daef9267c8d2f950e59ece56f94bace756d67fce24f1e7f49: Status 404 returned error can't find the container with id dfb4a225f663b39daef9267c8d2f950e59ece56f94bace756d67fce24f1e7f49 Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.990426 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lhxgt" event={"ID":"f5935916-b1bc-419c-b2de-21eeb777a88b","Type":"ContainerStarted","Data":"dfb4a225f663b39daef9267c8d2f950e59ece56f94bace756d67fce24f1e7f49"} Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.993842 4774 generic.go:334] "Generic (PLEG): container finished" podID="c2e57acc-f4ed-4ead-9811-2e606bbde022" containerID="a12cf282dc443a16fe5330604db2074937c7f23b0bc7d9e3b0fb6a36f01bf85d" exitCode=0 Nov 21 14:46:46 crc kubenswrapper[4774]: I1121 14:46:46.993928 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzq4q" event={"ID":"c2e57acc-f4ed-4ead-9811-2e606bbde022","Type":"ContainerDied","Data":"a12cf282dc443a16fe5330604db2074937c7f23b0bc7d9e3b0fb6a36f01bf85d"} Nov 21 14:46:48 crc kubenswrapper[4774]: I1121 14:46:48.010608 4774 generic.go:334] "Generic (PLEG): container finished" podID="f5935916-b1bc-419c-b2de-21eeb777a88b" containerID="2811a363acd4c4b1ac6844530eda744386843b172ea6cb473adeadc09984a1ee" exitCode=0 Nov 21 14:46:48 crc kubenswrapper[4774]: I1121 14:46:48.010687 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lhxgt" 
event={"ID":"f5935916-b1bc-419c-b2de-21eeb777a88b","Type":"ContainerDied","Data":"2811a363acd4c4b1ac6844530eda744386843b172ea6cb473adeadc09984a1ee"} Nov 21 14:46:51 crc kubenswrapper[4774]: I1121 14:46:51.046514 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzq4q" event={"ID":"c2e57acc-f4ed-4ead-9811-2e606bbde022","Type":"ContainerStarted","Data":"4bfeb4bb33e83011114e769d4660d0b5e808bf797a423181ca07c57fe4119c8d"} Nov 21 14:46:51 crc kubenswrapper[4774]: I1121 14:46:51.131628 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gzq4q" podStartSLOduration=4.075732159 podStartE2EDuration="10.131598236s" podCreationTimestamp="2025-11-21 14:46:41 +0000 UTC" firstStartedPulling="2025-11-21 14:46:43.955412373 +0000 UTC m=+2594.607611632" lastFinishedPulling="2025-11-21 14:46:50.01127844 +0000 UTC m=+2600.663477709" observedRunningTime="2025-11-21 14:46:51.123806095 +0000 UTC m=+2601.776005364" watchObservedRunningTime="2025-11-21 14:46:51.131598236 +0000 UTC m=+2601.783797495" Nov 21 14:46:52 crc kubenswrapper[4774]: I1121 14:46:52.057902 4774 generic.go:334] "Generic (PLEG): container finished" podID="f5935916-b1bc-419c-b2de-21eeb777a88b" containerID="08a2464db6921ffe04072ecb06db692ab1b6de9626cf2e4014efdedbdd7fa2fb" exitCode=0 Nov 21 14:46:52 crc kubenswrapper[4774]: I1121 14:46:52.058167 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lhxgt" event={"ID":"f5935916-b1bc-419c-b2de-21eeb777a88b","Type":"ContainerDied","Data":"08a2464db6921ffe04072ecb06db692ab1b6de9626cf2e4014efdedbdd7fa2fb"} Nov 21 14:46:52 crc kubenswrapper[4774]: I1121 14:46:52.298989 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:52 crc kubenswrapper[4774]: I1121 14:46:52.299109 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:52 crc kubenswrapper[4774]: I1121 14:46:52.355671 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:46:53 crc kubenswrapper[4774]: I1121 14:46:53.092972 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:46:53 crc kubenswrapper[4774]: E1121 14:46:53.093381 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:46:56 crc kubenswrapper[4774]: I1121 14:46:56.283237 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:46:56 crc kubenswrapper[4774]: I1121 14:46:56.343968 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5smvv"] Nov 21 14:46:57 crc kubenswrapper[4774]: I1121 14:46:57.111501 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5smvv" podUID="67f5d7eb-2806-413f-af7c-12cd9946c0d9" 
containerName="registry-server" containerID="cri-o://5ebfc288d2ca82ef71cf610a1355e30c206574f0d49f2a46402bd571319d487a" gracePeriod=2 Nov 21 14:47:02 crc kubenswrapper[4774]: I1121 14:47:02.376440 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:47:02 crc kubenswrapper[4774]: I1121 14:47:02.432301 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gzq4q"] Nov 21 14:47:03 crc kubenswrapper[4774]: I1121 14:47:03.965152 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.070395 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67f5d7eb-2806-413f-af7c-12cd9946c0d9-utilities\") pod \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\" (UID: \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\") " Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.070507 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67f5d7eb-2806-413f-af7c-12cd9946c0d9-catalog-content\") pod \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\" (UID: \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\") " Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.070585 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2r9r\" (UniqueName: \"kubernetes.io/projected/67f5d7eb-2806-413f-af7c-12cd9946c0d9-kube-api-access-n2r9r\") pod \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\" (UID: \"67f5d7eb-2806-413f-af7c-12cd9946c0d9\") " Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.071652 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67f5d7eb-2806-413f-af7c-12cd9946c0d9-utilities" (OuterVolumeSpecName: "utilities") pod "67f5d7eb-2806-413f-af7c-12cd9946c0d9" (UID: "67f5d7eb-2806-413f-af7c-12cd9946c0d9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.089159 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67f5d7eb-2806-413f-af7c-12cd9946c0d9-kube-api-access-n2r9r" (OuterVolumeSpecName: "kube-api-access-n2r9r") pod "67f5d7eb-2806-413f-af7c-12cd9946c0d9" (UID: "67f5d7eb-2806-413f-af7c-12cd9946c0d9"). InnerVolumeSpecName "kube-api-access-n2r9r". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.124129 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67f5d7eb-2806-413f-af7c-12cd9946c0d9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "67f5d7eb-2806-413f-af7c-12cd9946c0d9" (UID: "67f5d7eb-2806-413f-af7c-12cd9946c0d9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.172475 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/67f5d7eb-2806-413f-af7c-12cd9946c0d9-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.172938 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/67f5d7eb-2806-413f-af7c-12cd9946c0d9-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.173069 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2r9r\" (UniqueName: \"kubernetes.io/projected/67f5d7eb-2806-413f-af7c-12cd9946c0d9-kube-api-access-n2r9r\") on node \"crc\" DevicePath \"\"" Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.206456 4774 generic.go:334] "Generic (PLEG): container finished" podID="67f5d7eb-2806-413f-af7c-12cd9946c0d9" containerID="5ebfc288d2ca82ef71cf610a1355e30c206574f0d49f2a46402bd571319d487a" exitCode=0 Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.206527 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5smvv" event={"ID":"67f5d7eb-2806-413f-af7c-12cd9946c0d9","Type":"ContainerDied","Data":"5ebfc288d2ca82ef71cf610a1355e30c206574f0d49f2a46402bd571319d487a"} Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.206602 4774 scope.go:117] "RemoveContainer" containerID="5ebfc288d2ca82ef71cf610a1355e30c206574f0d49f2a46402bd571319d487a" Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.226977 4774 scope.go:117] "RemoveContainer" containerID="a574a4b671f8aad51a93ec6d96fe9699d7121f6c7d99fb9ce43475da1b8c87b0" Nov 21 14:47:04 crc kubenswrapper[4774]: I1121 14:47:04.251708 4774 scope.go:117] "RemoveContainer" containerID="6816c0474ce31ede75f984fa719ee01b65779397e0fb431697aa9467b079ead9" Nov 21 14:47:05 crc kubenswrapper[4774]: I1121 14:47:05.218944 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lhxgt" event={"ID":"f5935916-b1bc-419c-b2de-21eeb777a88b","Type":"ContainerStarted","Data":"3a7f50563835e84e2e4656034463799a16972dc5282b90567ec3635f3b04e20c"} Nov 21 14:47:05 crc kubenswrapper[4774]: I1121 14:47:05.220369 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5smvv" Nov 21 14:47:05 crc kubenswrapper[4774]: I1121 14:47:05.220363 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5smvv" event={"ID":"67f5d7eb-2806-413f-af7c-12cd9946c0d9","Type":"ContainerDied","Data":"7c809bbd86dcbf4b7ec7a723f9756e1275b297bd147c181cff3d9afe42335bf7"} Nov 21 14:47:05 crc kubenswrapper[4774]: I1121 14:47:05.220579 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gzq4q" podUID="c2e57acc-f4ed-4ead-9811-2e606bbde022" containerName="registry-server" containerID="cri-o://4bfeb4bb33e83011114e769d4660d0b5e808bf797a423181ca07c57fe4119c8d" gracePeriod=2 Nov 21 14:47:05 crc kubenswrapper[4774]: I1121 14:47:05.244433 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lhxgt" podStartSLOduration=5.218853642 podStartE2EDuration="20.244398011s" podCreationTimestamp="2025-11-21 14:46:45 +0000 UTC" firstStartedPulling="2025-11-21 14:46:48.01285624 +0000 UTC m=+2598.665055539" lastFinishedPulling="2025-11-21 14:47:03.038400629 +0000 UTC m=+2613.690599908" observedRunningTime="2025-11-21 14:47:05.242000013 +0000 UTC m=+2615.894199292" watchObservedRunningTime="2025-11-21 14:47:05.244398011 +0000 UTC m=+2615.896597270" Nov 21 14:47:05 crc kubenswrapper[4774]: I1121 14:47:05.273087 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5smvv"] Nov 21 14:47:05 crc kubenswrapper[4774]: I1121 14:47:05.278087 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5smvv"] Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.094397 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.108581 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67f5d7eb-2806-413f-af7c-12cd9946c0d9" path="/var/lib/kubelet/pods/67f5d7eb-2806-413f-af7c-12cd9946c0d9/volumes" Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.184412 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.185416 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.242358 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.246222 4774 generic.go:334] "Generic (PLEG): container finished" podID="c2e57acc-f4ed-4ead-9811-2e606bbde022" containerID="4bfeb4bb33e83011114e769d4660d0b5e808bf797a423181ca07c57fe4119c8d" exitCode=0 Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.246447 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzq4q" event={"ID":"c2e57acc-f4ed-4ead-9811-2e606bbde022","Type":"ContainerDied","Data":"4bfeb4bb33e83011114e769d4660d0b5e808bf797a423181ca07c57fe4119c8d"} Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.246496 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gzq4q" 
event={"ID":"c2e57acc-f4ed-4ead-9811-2e606bbde022","Type":"ContainerDied","Data":"4790a140c99c1823daf0237cce60c060fd52343cd04068121785eb307b7a42e3"} Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.246509 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4790a140c99c1823daf0237cce60c060fd52343cd04068121785eb307b7a42e3" Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.270604 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.422929 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2e57acc-f4ed-4ead-9811-2e606bbde022-catalog-content\") pod \"c2e57acc-f4ed-4ead-9811-2e606bbde022\" (UID: \"c2e57acc-f4ed-4ead-9811-2e606bbde022\") " Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.423037 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2e57acc-f4ed-4ead-9811-2e606bbde022-utilities\") pod \"c2e57acc-f4ed-4ead-9811-2e606bbde022\" (UID: \"c2e57acc-f4ed-4ead-9811-2e606bbde022\") " Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.423354 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6b5ms\" (UniqueName: \"kubernetes.io/projected/c2e57acc-f4ed-4ead-9811-2e606bbde022-kube-api-access-6b5ms\") pod \"c2e57acc-f4ed-4ead-9811-2e606bbde022\" (UID: \"c2e57acc-f4ed-4ead-9811-2e606bbde022\") " Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.424033 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2e57acc-f4ed-4ead-9811-2e606bbde022-utilities" (OuterVolumeSpecName: "utilities") pod "c2e57acc-f4ed-4ead-9811-2e606bbde022" (UID: "c2e57acc-f4ed-4ead-9811-2e606bbde022"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.425438 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2e57acc-f4ed-4ead-9811-2e606bbde022-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.433181 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2e57acc-f4ed-4ead-9811-2e606bbde022-kube-api-access-6b5ms" (OuterVolumeSpecName: "kube-api-access-6b5ms") pod "c2e57acc-f4ed-4ead-9811-2e606bbde022" (UID: "c2e57acc-f4ed-4ead-9811-2e606bbde022"). InnerVolumeSpecName "kube-api-access-6b5ms". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.453637 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2e57acc-f4ed-4ead-9811-2e606bbde022-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c2e57acc-f4ed-4ead-9811-2e606bbde022" (UID: "c2e57acc-f4ed-4ead-9811-2e606bbde022"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.527770 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2e57acc-f4ed-4ead-9811-2e606bbde022-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:47:06 crc kubenswrapper[4774]: I1121 14:47:06.528009 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6b5ms\" (UniqueName: \"kubernetes.io/projected/c2e57acc-f4ed-4ead-9811-2e606bbde022-kube-api-access-6b5ms\") on node \"crc\" DevicePath \"\"" Nov 21 14:47:07 crc kubenswrapper[4774]: I1121 14:47:07.256570 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"a68016e71be6cc34a7781da546a354010f711b7ff42586f29dbf1fa13d17e8b4"} Nov 21 14:47:07 crc kubenswrapper[4774]: I1121 14:47:07.256619 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gzq4q" Nov 21 14:47:07 crc kubenswrapper[4774]: I1121 14:47:07.301989 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gzq4q"] Nov 21 14:47:07 crc kubenswrapper[4774]: I1121 14:47:07.312024 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gzq4q"] Nov 21 14:47:08 crc kubenswrapper[4774]: I1121 14:47:08.112754 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2e57acc-f4ed-4ead-9811-2e606bbde022" path="/var/lib/kubelet/pods/c2e57acc-f4ed-4ead-9811-2e606bbde022/volumes" Nov 21 14:47:16 crc kubenswrapper[4774]: I1121 14:47:16.252353 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:47:16 crc kubenswrapper[4774]: I1121 14:47:16.315359 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lhxgt"] Nov 21 14:47:16 crc kubenswrapper[4774]: I1121 14:47:16.363563 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lhxgt" podUID="f5935916-b1bc-419c-b2de-21eeb777a88b" containerName="registry-server" containerID="cri-o://3a7f50563835e84e2e4656034463799a16972dc5282b90567ec3635f3b04e20c" gracePeriod=2 Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.373359 4774 generic.go:334] "Generic (PLEG): container finished" podID="f5935916-b1bc-419c-b2de-21eeb777a88b" containerID="3a7f50563835e84e2e4656034463799a16972dc5282b90567ec3635f3b04e20c" exitCode=0 Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.373613 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lhxgt" event={"ID":"f5935916-b1bc-419c-b2de-21eeb777a88b","Type":"ContainerDied","Data":"3a7f50563835e84e2e4656034463799a16972dc5282b90567ec3635f3b04e20c"} Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.373705 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lhxgt" event={"ID":"f5935916-b1bc-419c-b2de-21eeb777a88b","Type":"ContainerDied","Data":"dfb4a225f663b39daef9267c8d2f950e59ece56f94bace756d67fce24f1e7f49"} Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.373729 4774 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="dfb4a225f663b39daef9267c8d2f950e59ece56f94bace756d67fce24f1e7f49" Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.425643 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.435579 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5935916-b1bc-419c-b2de-21eeb777a88b-utilities\") pod \"f5935916-b1bc-419c-b2de-21eeb777a88b\" (UID: \"f5935916-b1bc-419c-b2de-21eeb777a88b\") " Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.436485 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5935916-b1bc-419c-b2de-21eeb777a88b-catalog-content\") pod \"f5935916-b1bc-419c-b2de-21eeb777a88b\" (UID: \"f5935916-b1bc-419c-b2de-21eeb777a88b\") " Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.436547 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2lk8\" (UniqueName: \"kubernetes.io/projected/f5935916-b1bc-419c-b2de-21eeb777a88b-kube-api-access-b2lk8\") pod \"f5935916-b1bc-419c-b2de-21eeb777a88b\" (UID: \"f5935916-b1bc-419c-b2de-21eeb777a88b\") " Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.437069 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5935916-b1bc-419c-b2de-21eeb777a88b-utilities" (OuterVolumeSpecName: "utilities") pod "f5935916-b1bc-419c-b2de-21eeb777a88b" (UID: "f5935916-b1bc-419c-b2de-21eeb777a88b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.437253 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5935916-b1bc-419c-b2de-21eeb777a88b-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.446221 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5935916-b1bc-419c-b2de-21eeb777a88b-kube-api-access-b2lk8" (OuterVolumeSpecName: "kube-api-access-b2lk8") pod "f5935916-b1bc-419c-b2de-21eeb777a88b" (UID: "f5935916-b1bc-419c-b2de-21eeb777a88b"). InnerVolumeSpecName "kube-api-access-b2lk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.493367 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5935916-b1bc-419c-b2de-21eeb777a88b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f5935916-b1bc-419c-b2de-21eeb777a88b" (UID: "f5935916-b1bc-419c-b2de-21eeb777a88b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.538731 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5935916-b1bc-419c-b2de-21eeb777a88b-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:47:17 crc kubenswrapper[4774]: I1121 14:47:17.538767 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2lk8\" (UniqueName: \"kubernetes.io/projected/f5935916-b1bc-419c-b2de-21eeb777a88b-kube-api-access-b2lk8\") on node \"crc\" DevicePath \"\"" Nov 21 14:47:18 crc kubenswrapper[4774]: I1121 14:47:18.387327 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lhxgt" Nov 21 14:47:18 crc kubenswrapper[4774]: I1121 14:47:18.433260 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lhxgt"] Nov 21 14:47:18 crc kubenswrapper[4774]: I1121 14:47:18.443298 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lhxgt"] Nov 21 14:47:20 crc kubenswrapper[4774]: I1121 14:47:20.108413 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5935916-b1bc-419c-b2de-21eeb777a88b" path="/var/lib/kubelet/pods/f5935916-b1bc-419c-b2de-21eeb777a88b/volumes" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.628858 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-97rtj"] Nov 21 14:48:46 crc kubenswrapper[4774]: E1121 14:48:46.630197 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5935916-b1bc-419c-b2de-21eeb777a88b" containerName="extract-utilities" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.630216 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5935916-b1bc-419c-b2de-21eeb777a88b" containerName="extract-utilities" Nov 21 14:48:46 crc kubenswrapper[4774]: E1121 14:48:46.630226 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5935916-b1bc-419c-b2de-21eeb777a88b" containerName="registry-server" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.630232 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5935916-b1bc-419c-b2de-21eeb777a88b" containerName="registry-server" Nov 21 14:48:46 crc kubenswrapper[4774]: E1121 14:48:46.630247 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2e57acc-f4ed-4ead-9811-2e606bbde022" containerName="registry-server" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.630254 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2e57acc-f4ed-4ead-9811-2e606bbde022" containerName="registry-server" Nov 21 14:48:46 crc kubenswrapper[4774]: E1121 14:48:46.630271 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67f5d7eb-2806-413f-af7c-12cd9946c0d9" containerName="extract-content" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.630278 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="67f5d7eb-2806-413f-af7c-12cd9946c0d9" containerName="extract-content" Nov 21 14:48:46 crc kubenswrapper[4774]: E1121 14:48:46.630305 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67f5d7eb-2806-413f-af7c-12cd9946c0d9" containerName="registry-server" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.630311 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="67f5d7eb-2806-413f-af7c-12cd9946c0d9" 
containerName="registry-server" Nov 21 14:48:46 crc kubenswrapper[4774]: E1121 14:48:46.630327 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2e57acc-f4ed-4ead-9811-2e606bbde022" containerName="extract-utilities" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.630333 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2e57acc-f4ed-4ead-9811-2e606bbde022" containerName="extract-utilities" Nov 21 14:48:46 crc kubenswrapper[4774]: E1121 14:48:46.630343 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67f5d7eb-2806-413f-af7c-12cd9946c0d9" containerName="extract-utilities" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.630349 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="67f5d7eb-2806-413f-af7c-12cd9946c0d9" containerName="extract-utilities" Nov 21 14:48:46 crc kubenswrapper[4774]: E1121 14:48:46.630361 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2e57acc-f4ed-4ead-9811-2e606bbde022" containerName="extract-content" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.630367 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2e57acc-f4ed-4ead-9811-2e606bbde022" containerName="extract-content" Nov 21 14:48:46 crc kubenswrapper[4774]: E1121 14:48:46.630376 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5935916-b1bc-419c-b2de-21eeb777a88b" containerName="extract-content" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.630382 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5935916-b1bc-419c-b2de-21eeb777a88b" containerName="extract-content" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.630535 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="67f5d7eb-2806-413f-af7c-12cd9946c0d9" containerName="registry-server" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.630550 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5935916-b1bc-419c-b2de-21eeb777a88b" containerName="registry-server" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.630561 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2e57acc-f4ed-4ead-9811-2e606bbde022" containerName="registry-server" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.631876 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.654260 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-97rtj"] Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.744892 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42475fc6-cbb9-4f84-903e-0526789f4f41-utilities\") pod \"redhat-operators-97rtj\" (UID: \"42475fc6-cbb9-4f84-903e-0526789f4f41\") " pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.744961 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42475fc6-cbb9-4f84-903e-0526789f4f41-catalog-content\") pod \"redhat-operators-97rtj\" (UID: \"42475fc6-cbb9-4f84-903e-0526789f4f41\") " pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.744981 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c48gm\" (UniqueName: \"kubernetes.io/projected/42475fc6-cbb9-4f84-903e-0526789f4f41-kube-api-access-c48gm\") pod \"redhat-operators-97rtj\" (UID: \"42475fc6-cbb9-4f84-903e-0526789f4f41\") " pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.846468 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42475fc6-cbb9-4f84-903e-0526789f4f41-catalog-content\") pod \"redhat-operators-97rtj\" (UID: \"42475fc6-cbb9-4f84-903e-0526789f4f41\") " pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.846538 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c48gm\" (UniqueName: \"kubernetes.io/projected/42475fc6-cbb9-4f84-903e-0526789f4f41-kube-api-access-c48gm\") pod \"redhat-operators-97rtj\" (UID: \"42475fc6-cbb9-4f84-903e-0526789f4f41\") " pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.846646 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42475fc6-cbb9-4f84-903e-0526789f4f41-utilities\") pod \"redhat-operators-97rtj\" (UID: \"42475fc6-cbb9-4f84-903e-0526789f4f41\") " pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.847669 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42475fc6-cbb9-4f84-903e-0526789f4f41-catalog-content\") pod \"redhat-operators-97rtj\" (UID: \"42475fc6-cbb9-4f84-903e-0526789f4f41\") " pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.847682 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42475fc6-cbb9-4f84-903e-0526789f4f41-utilities\") pod \"redhat-operators-97rtj\" (UID: \"42475fc6-cbb9-4f84-903e-0526789f4f41\") " pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.879264 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-c48gm\" (UniqueName: \"kubernetes.io/projected/42475fc6-cbb9-4f84-903e-0526789f4f41-kube-api-access-c48gm\") pod \"redhat-operators-97rtj\" (UID: \"42475fc6-cbb9-4f84-903e-0526789f4f41\") " pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:46 crc kubenswrapper[4774]: I1121 14:48:46.965178 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:47 crc kubenswrapper[4774]: I1121 14:48:47.213786 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-97rtj"] Nov 21 14:48:48 crc kubenswrapper[4774]: I1121 14:48:48.152011 4774 generic.go:334] "Generic (PLEG): container finished" podID="42475fc6-cbb9-4f84-903e-0526789f4f41" containerID="7e448433c6b2606989ca238fe5b5c1bb6ac4f39d8a8bc2fd1d65a228616040e6" exitCode=0 Nov 21 14:48:48 crc kubenswrapper[4774]: I1121 14:48:48.152090 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97rtj" event={"ID":"42475fc6-cbb9-4f84-903e-0526789f4f41","Type":"ContainerDied","Data":"7e448433c6b2606989ca238fe5b5c1bb6ac4f39d8a8bc2fd1d65a228616040e6"} Nov 21 14:48:48 crc kubenswrapper[4774]: I1121 14:48:48.153332 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97rtj" event={"ID":"42475fc6-cbb9-4f84-903e-0526789f4f41","Type":"ContainerStarted","Data":"fdd5f3d65663160be26971365c68b3b455544eda410610b4c460b314a76a14d9"} Nov 21 14:48:49 crc kubenswrapper[4774]: I1121 14:48:49.171619 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97rtj" event={"ID":"42475fc6-cbb9-4f84-903e-0526789f4f41","Type":"ContainerStarted","Data":"f14825525456aa9715731c7e224693be6dbb719d69e9018e71cf35d6aeabf230"} Nov 21 14:48:50 crc kubenswrapper[4774]: I1121 14:48:50.182740 4774 generic.go:334] "Generic (PLEG): container finished" podID="42475fc6-cbb9-4f84-903e-0526789f4f41" containerID="f14825525456aa9715731c7e224693be6dbb719d69e9018e71cf35d6aeabf230" exitCode=0 Nov 21 14:48:50 crc kubenswrapper[4774]: I1121 14:48:50.182794 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97rtj" event={"ID":"42475fc6-cbb9-4f84-903e-0526789f4f41","Type":"ContainerDied","Data":"f14825525456aa9715731c7e224693be6dbb719d69e9018e71cf35d6aeabf230"} Nov 21 14:48:51 crc kubenswrapper[4774]: I1121 14:48:51.195972 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97rtj" event={"ID":"42475fc6-cbb9-4f84-903e-0526789f4f41","Type":"ContainerStarted","Data":"95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858"} Nov 21 14:48:51 crc kubenswrapper[4774]: I1121 14:48:51.219302 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-97rtj" podStartSLOduration=2.661696197 podStartE2EDuration="5.219277837s" podCreationTimestamp="2025-11-21 14:48:46 +0000 UTC" firstStartedPulling="2025-11-21 14:48:48.155644717 +0000 UTC m=+2718.807843976" lastFinishedPulling="2025-11-21 14:48:50.713226357 +0000 UTC m=+2721.365425616" observedRunningTime="2025-11-21 14:48:51.216374775 +0000 UTC m=+2721.868574054" watchObservedRunningTime="2025-11-21 14:48:51.219277837 +0000 UTC m=+2721.871477096" Nov 21 14:48:56 crc kubenswrapper[4774]: I1121 14:48:56.966220 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 
14:48:56 crc kubenswrapper[4774]: I1121 14:48:56.966763 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:57 crc kubenswrapper[4774]: I1121 14:48:57.017063 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:57 crc kubenswrapper[4774]: I1121 14:48:57.314395 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:57 crc kubenswrapper[4774]: I1121 14:48:57.371847 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-97rtj"] Nov 21 14:48:59 crc kubenswrapper[4774]: I1121 14:48:59.276595 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-97rtj" podUID="42475fc6-cbb9-4f84-903e-0526789f4f41" containerName="registry-server" containerID="cri-o://95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858" gracePeriod=2 Nov 21 14:48:59 crc kubenswrapper[4774]: I1121 14:48:59.683372 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:48:59 crc kubenswrapper[4774]: I1121 14:48:59.748282 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42475fc6-cbb9-4f84-903e-0526789f4f41-utilities\") pod \"42475fc6-cbb9-4f84-903e-0526789f4f41\" (UID: \"42475fc6-cbb9-4f84-903e-0526789f4f41\") " Nov 21 14:48:59 crc kubenswrapper[4774]: I1121 14:48:59.748452 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42475fc6-cbb9-4f84-903e-0526789f4f41-catalog-content\") pod \"42475fc6-cbb9-4f84-903e-0526789f4f41\" (UID: \"42475fc6-cbb9-4f84-903e-0526789f4f41\") " Nov 21 14:48:59 crc kubenswrapper[4774]: I1121 14:48:59.748501 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c48gm\" (UniqueName: \"kubernetes.io/projected/42475fc6-cbb9-4f84-903e-0526789f4f41-kube-api-access-c48gm\") pod \"42475fc6-cbb9-4f84-903e-0526789f4f41\" (UID: \"42475fc6-cbb9-4f84-903e-0526789f4f41\") " Nov 21 14:48:59 crc kubenswrapper[4774]: I1121 14:48:59.750981 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42475fc6-cbb9-4f84-903e-0526789f4f41-utilities" (OuterVolumeSpecName: "utilities") pod "42475fc6-cbb9-4f84-903e-0526789f4f41" (UID: "42475fc6-cbb9-4f84-903e-0526789f4f41"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:48:59 crc kubenswrapper[4774]: I1121 14:48:59.762183 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42475fc6-cbb9-4f84-903e-0526789f4f41-kube-api-access-c48gm" (OuterVolumeSpecName: "kube-api-access-c48gm") pod "42475fc6-cbb9-4f84-903e-0526789f4f41" (UID: "42475fc6-cbb9-4f84-903e-0526789f4f41"). InnerVolumeSpecName "kube-api-access-c48gm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:48:59 crc kubenswrapper[4774]: I1121 14:48:59.858913 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c48gm\" (UniqueName: \"kubernetes.io/projected/42475fc6-cbb9-4f84-903e-0526789f4f41-kube-api-access-c48gm\") on node \"crc\" DevicePath \"\"" Nov 21 14:48:59 crc kubenswrapper[4774]: I1121 14:48:59.858960 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/42475fc6-cbb9-4f84-903e-0526789f4f41-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:48:59 crc kubenswrapper[4774]: I1121 14:48:59.870250 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/42475fc6-cbb9-4f84-903e-0526789f4f41-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "42475fc6-cbb9-4f84-903e-0526789f4f41" (UID: "42475fc6-cbb9-4f84-903e-0526789f4f41"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:48:59 crc kubenswrapper[4774]: I1121 14:48:59.960033 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/42475fc6-cbb9-4f84-903e-0526789f4f41-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.287547 4774 generic.go:334] "Generic (PLEG): container finished" podID="42475fc6-cbb9-4f84-903e-0526789f4f41" containerID="95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858" exitCode=0 Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.287620 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-97rtj" Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.287619 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97rtj" event={"ID":"42475fc6-cbb9-4f84-903e-0526789f4f41","Type":"ContainerDied","Data":"95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858"} Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.288011 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-97rtj" event={"ID":"42475fc6-cbb9-4f84-903e-0526789f4f41","Type":"ContainerDied","Data":"fdd5f3d65663160be26971365c68b3b455544eda410610b4c460b314a76a14d9"} Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.288064 4774 scope.go:117] "RemoveContainer" containerID="95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858" Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.311257 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-97rtj"] Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.319356 4774 scope.go:117] "RemoveContainer" containerID="f14825525456aa9715731c7e224693be6dbb719d69e9018e71cf35d6aeabf230" Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.341851 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-97rtj"] Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.342413 4774 scope.go:117] "RemoveContainer" containerID="7e448433c6b2606989ca238fe5b5c1bb6ac4f39d8a8bc2fd1d65a228616040e6" Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.379288 4774 scope.go:117] "RemoveContainer" containerID="95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858" Nov 21 14:49:00 crc kubenswrapper[4774]: E1121 14:49:00.379789 4774 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858\": container with ID starting with 95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858 not found: ID does not exist" containerID="95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858" Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.379920 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858"} err="failed to get container status \"95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858\": rpc error: code = NotFound desc = could not find container \"95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858\": container with ID starting with 95f9907f298a35cfd81d5235b6eacc4d7d5ae0e0c5eda464c7c15531877b8858 not found: ID does not exist" Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.380009 4774 scope.go:117] "RemoveContainer" containerID="f14825525456aa9715731c7e224693be6dbb719d69e9018e71cf35d6aeabf230" Nov 21 14:49:00 crc kubenswrapper[4774]: E1121 14:49:00.380389 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f14825525456aa9715731c7e224693be6dbb719d69e9018e71cf35d6aeabf230\": container with ID starting with f14825525456aa9715731c7e224693be6dbb719d69e9018e71cf35d6aeabf230 not found: ID does not exist" containerID="f14825525456aa9715731c7e224693be6dbb719d69e9018e71cf35d6aeabf230" Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.380411 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f14825525456aa9715731c7e224693be6dbb719d69e9018e71cf35d6aeabf230"} err="failed to get container status \"f14825525456aa9715731c7e224693be6dbb719d69e9018e71cf35d6aeabf230\": rpc error: code = NotFound desc = could not find container \"f14825525456aa9715731c7e224693be6dbb719d69e9018e71cf35d6aeabf230\": container with ID starting with f14825525456aa9715731c7e224693be6dbb719d69e9018e71cf35d6aeabf230 not found: ID does not exist" Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.380424 4774 scope.go:117] "RemoveContainer" containerID="7e448433c6b2606989ca238fe5b5c1bb6ac4f39d8a8bc2fd1d65a228616040e6" Nov 21 14:49:00 crc kubenswrapper[4774]: E1121 14:49:00.380678 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e448433c6b2606989ca238fe5b5c1bb6ac4f39d8a8bc2fd1d65a228616040e6\": container with ID starting with 7e448433c6b2606989ca238fe5b5c1bb6ac4f39d8a8bc2fd1d65a228616040e6 not found: ID does not exist" containerID="7e448433c6b2606989ca238fe5b5c1bb6ac4f39d8a8bc2fd1d65a228616040e6" Nov 21 14:49:00 crc kubenswrapper[4774]: I1121 14:49:00.380787 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e448433c6b2606989ca238fe5b5c1bb6ac4f39d8a8bc2fd1d65a228616040e6"} err="failed to get container status \"7e448433c6b2606989ca238fe5b5c1bb6ac4f39d8a8bc2fd1d65a228616040e6\": rpc error: code = NotFound desc = could not find container \"7e448433c6b2606989ca238fe5b5c1bb6ac4f39d8a8bc2fd1d65a228616040e6\": container with ID starting with 7e448433c6b2606989ca238fe5b5c1bb6ac4f39d8a8bc2fd1d65a228616040e6 not found: ID does not exist" Nov 21 14:49:02 crc kubenswrapper[4774]: I1121 14:49:02.107194 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="42475fc6-cbb9-4f84-903e-0526789f4f41" path="/var/lib/kubelet/pods/42475fc6-cbb9-4f84-903e-0526789f4f41/volumes" Nov 21 14:49:29 crc kubenswrapper[4774]: I1121 14:49:29.601561 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:49:29 crc kubenswrapper[4774]: I1121 14:49:29.602597 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:49:59 crc kubenswrapper[4774]: I1121 14:49:59.600430 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:49:59 crc kubenswrapper[4774]: I1121 14:49:59.601059 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:50:29 crc kubenswrapper[4774]: I1121 14:50:29.601177 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:50:29 crc kubenswrapper[4774]: I1121 14:50:29.601806 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:50:29 crc kubenswrapper[4774]: I1121 14:50:29.601900 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:50:29 crc kubenswrapper[4774]: I1121 14:50:29.602903 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a68016e71be6cc34a7781da546a354010f711b7ff42586f29dbf1fa13d17e8b4"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 14:50:29 crc kubenswrapper[4774]: I1121 14:50:29.602973 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://a68016e71be6cc34a7781da546a354010f711b7ff42586f29dbf1fa13d17e8b4" gracePeriod=600 Nov 21 14:50:30 crc kubenswrapper[4774]: I1121 14:50:30.138479 4774 generic.go:334] "Generic (PLEG): container finished" 
podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="a68016e71be6cc34a7781da546a354010f711b7ff42586f29dbf1fa13d17e8b4" exitCode=0 Nov 21 14:50:30 crc kubenswrapper[4774]: I1121 14:50:30.138540 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"a68016e71be6cc34a7781da546a354010f711b7ff42586f29dbf1fa13d17e8b4"} Nov 21 14:50:30 crc kubenswrapper[4774]: I1121 14:50:30.138873 4774 scope.go:117] "RemoveContainer" containerID="80fd83cd5b0fb2ba02d14477e4b39b89b2b8a513120b8d4f726e1309696621c6" Nov 21 14:50:31 crc kubenswrapper[4774]: I1121 14:50:31.158987 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"} Nov 21 14:52:59 crc kubenswrapper[4774]: I1121 14:52:59.600585 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:52:59 crc kubenswrapper[4774]: I1121 14:52:59.601134 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:53:29 crc kubenswrapper[4774]: I1121 14:53:29.601476 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:53:29 crc kubenswrapper[4774]: I1121 14:53:29.602216 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:53:38 crc kubenswrapper[4774]: I1121 14:53:38.764652 4774 scope.go:117] "RemoveContainer" containerID="65bdd452e35f353c401f19d33d6c5c70cc566ebab68c13dec4647e24fe68bc06" Nov 21 14:53:38 crc kubenswrapper[4774]: I1121 14:53:38.804363 4774 scope.go:117] "RemoveContainer" containerID="4bfeb4bb33e83011114e769d4660d0b5e808bf797a423181ca07c57fe4119c8d" Nov 21 14:53:38 crc kubenswrapper[4774]: I1121 14:53:38.824984 4774 scope.go:117] "RemoveContainer" containerID="08a2464db6921ffe04072ecb06db692ab1b6de9626cf2e4014efdedbdd7fa2fb" Nov 21 14:53:38 crc kubenswrapper[4774]: I1121 14:53:38.843336 4774 scope.go:117] "RemoveContainer" containerID="2811a363acd4c4b1ac6844530eda744386843b172ea6cb473adeadc09984a1ee" Nov 21 14:53:38 crc kubenswrapper[4774]: I1121 14:53:38.863674 4774 scope.go:117] "RemoveContainer" containerID="3a7f50563835e84e2e4656034463799a16972dc5282b90567ec3635f3b04e20c" Nov 21 14:53:38 crc kubenswrapper[4774]: I1121 14:53:38.885975 4774 scope.go:117] "RemoveContainer" containerID="a12cf282dc443a16fe5330604db2074937c7f23b0bc7d9e3b0fb6a36f01bf85d" 
Nov 21 14:53:59 crc kubenswrapper[4774]: I1121 14:53:59.601500 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 14:53:59 crc kubenswrapper[4774]: I1121 14:53:59.602437 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 14:53:59 crc kubenswrapper[4774]: I1121 14:53:59.602496 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 14:53:59 crc kubenswrapper[4774]: I1121 14:53:59.603321 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 14:53:59 crc kubenswrapper[4774]: I1121 14:53:59.603380 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107" gracePeriod=600 Nov 21 14:53:59 crc kubenswrapper[4774]: E1121 14:53:59.745937 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 14:54:00 crc kubenswrapper[4774]: I1121 14:54:00.441109 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"} Nov 21 14:54:00 crc kubenswrapper[4774]: I1121 14:54:00.441027 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107" exitCode=0 Nov 21 14:54:00 crc kubenswrapper[4774]: I1121 14:54:00.441483 4774 scope.go:117] "RemoveContainer" containerID="a68016e71be6cc34a7781da546a354010f711b7ff42586f29dbf1fa13d17e8b4" Nov 21 14:54:00 crc kubenswrapper[4774]: I1121 14:54:00.442132 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107" Nov 21 14:54:00 crc kubenswrapper[4774]: E1121 14:54:00.442454 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:54:12 crc kubenswrapper[4774]: I1121 14:54:12.093737 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:54:12 crc kubenswrapper[4774]: E1121 14:54:12.094510 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:54:24 crc kubenswrapper[4774]: I1121 14:54:24.093733 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:54:24 crc kubenswrapper[4774]: E1121 14:54:24.094472 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:54:39 crc kubenswrapper[4774]: I1121 14:54:39.094011 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:54:39 crc kubenswrapper[4774]: E1121 14:54:39.095228 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:54:51 crc kubenswrapper[4774]: I1121 14:54:51.093253 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:54:51 crc kubenswrapper[4774]: E1121 14:54:51.095032 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:55:04 crc kubenswrapper[4774]: I1121 14:55:04.093126 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:55:04 crc kubenswrapper[4774]: E1121 14:55:04.093891 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:55:15 crc kubenswrapper[4774]: I1121 14:55:15.093916 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:55:15 crc kubenswrapper[4774]: E1121 14:55:15.096303 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:55:29 crc kubenswrapper[4774]: I1121 14:55:29.093224 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:55:29 crc kubenswrapper[4774]: E1121 14:55:29.093971 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:55:43 crc kubenswrapper[4774]: I1121 14:55:43.093511 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:55:43 crc kubenswrapper[4774]: E1121 14:55:43.094360 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:55:55 crc kubenswrapper[4774]: I1121 14:55:55.093637 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:55:55 crc kubenswrapper[4774]: E1121 14:55:55.094781 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:56:10 crc kubenswrapper[4774]: I1121 14:56:10.100647 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:56:10 crc kubenswrapper[4774]: E1121 14:56:10.101531 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:56:23 crc kubenswrapper[4774]: I1121 14:56:23.093585 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:56:23 crc kubenswrapper[4774]: E1121 14:56:23.094373 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:56:37 crc kubenswrapper[4774]: I1121 14:56:37.094233 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:56:37 crc kubenswrapper[4774]: E1121 14:56:37.095109 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:56:51 crc kubenswrapper[4774]: I1121 14:56:51.093456 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:56:51 crc kubenswrapper[4774]: E1121 14:56:51.094294 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:57:03 crc kubenswrapper[4774]: I1121 14:57:03.093501 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:57:03 crc kubenswrapper[4774]: E1121 14:57:03.094491 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.592313 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gvb4x"]
Nov 21 14:57:11 crc kubenswrapper[4774]: E1121 14:57:11.593368 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42475fc6-cbb9-4f84-903e-0526789f4f41" containerName="registry-server"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.593388 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="42475fc6-cbb9-4f84-903e-0526789f4f41" containerName="registry-server"
Nov 21 14:57:11 crc kubenswrapper[4774]: E1121 14:57:11.593427 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42475fc6-cbb9-4f84-903e-0526789f4f41" containerName="extract-utilities"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.593438 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="42475fc6-cbb9-4f84-903e-0526789f4f41" containerName="extract-utilities"
Nov 21 14:57:11 crc kubenswrapper[4774]: E1121 14:57:11.593457 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42475fc6-cbb9-4f84-903e-0526789f4f41" containerName="extract-content"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.593465 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="42475fc6-cbb9-4f84-903e-0526789f4f41" containerName="extract-content"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.593625 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="42475fc6-cbb9-4f84-903e-0526789f4f41" containerName="registry-server"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.594676 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.613438 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gvb4x"]
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.725278 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpvbf\" (UniqueName: \"kubernetes.io/projected/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-kube-api-access-gpvbf\") pod \"community-operators-gvb4x\" (UID: \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\") " pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.725344 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-catalog-content\") pod \"community-operators-gvb4x\" (UID: \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\") " pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.725465 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-utilities\") pod \"community-operators-gvb4x\" (UID: \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\") " pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.826255 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpvbf\" (UniqueName: \"kubernetes.io/projected/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-kube-api-access-gpvbf\") pod \"community-operators-gvb4x\" (UID: \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\") " pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.826299 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-catalog-content\") pod \"community-operators-gvb4x\" (UID: \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\") " pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.826351 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-utilities\") pod \"community-operators-gvb4x\" (UID: \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\") " pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.826805 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-utilities\") pod \"community-operators-gvb4x\" (UID: \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\") " pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.826898 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-catalog-content\") pod \"community-operators-gvb4x\" (UID: \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\") " pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.847178 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpvbf\" (UniqueName: \"kubernetes.io/projected/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-kube-api-access-gpvbf\") pod \"community-operators-gvb4x\" (UID: \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\") " pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:11 crc kubenswrapper[4774]: I1121 14:57:11.916255 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:12 crc kubenswrapper[4774]: I1121 14:57:12.407613 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gvb4x"]
Nov 21 14:57:13 crc kubenswrapper[4774]: I1121 14:57:13.053468 4774 generic.go:334] "Generic (PLEG): container finished" podID="a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" containerID="07ff6e9a51f7457b2c20d9e45d9f9cac9886957acc8ee43b70d361d3fd0b0c62" exitCode=0
Nov 21 14:57:13 crc kubenswrapper[4774]: I1121 14:57:13.053547 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvb4x" event={"ID":"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48","Type":"ContainerDied","Data":"07ff6e9a51f7457b2c20d9e45d9f9cac9886957acc8ee43b70d361d3fd0b0c62"}
Nov 21 14:57:13 crc kubenswrapper[4774]: I1121 14:57:13.053869 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvb4x" event={"ID":"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48","Type":"ContainerStarted","Data":"3930609a86191675479c5a981d1539c03dafffdf5a54e8e017960d56a932ad85"}
Nov 21 14:57:13 crc kubenswrapper[4774]: I1121 14:57:13.056373 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 21 14:57:14 crc kubenswrapper[4774]: I1121 14:57:14.065935 4774 generic.go:334] "Generic (PLEG): container finished" podID="a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" containerID="8b46417ad55210f4f32fe1a21c5553a5fa97c30e7caf95285ec70028ce77db68" exitCode=0
Nov 21 14:57:14 crc kubenswrapper[4774]: I1121 14:57:14.066110 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvb4x" event={"ID":"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48","Type":"ContainerDied","Data":"8b46417ad55210f4f32fe1a21c5553a5fa97c30e7caf95285ec70028ce77db68"}
Nov 21 14:57:15 crc kubenswrapper[4774]: I1121 14:57:15.075737 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvb4x" event={"ID":"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48","Type":"ContainerStarted","Data":"fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef"}
Nov 21 14:57:15 crc kubenswrapper[4774]: I1121 14:57:15.100024 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gvb4x" podStartSLOduration=2.726647041 podStartE2EDuration="4.100004505s" podCreationTimestamp="2025-11-21 14:57:11 +0000 UTC" firstStartedPulling="2025-11-21 14:57:13.056127727 +0000 UTC m=+3223.708326986" lastFinishedPulling="2025-11-21 14:57:14.429485191 +0000 UTC m=+3225.081684450" observedRunningTime="2025-11-21 14:57:15.092899653 +0000 UTC m=+3225.745098912" watchObservedRunningTime="2025-11-21 14:57:15.100004505 +0000 UTC m=+3225.752203764"
Nov 21 14:57:17 crc kubenswrapper[4774]: I1121 14:57:17.093460 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:57:17 crc kubenswrapper[4774]: E1121 14:57:17.094245 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:57:21 crc kubenswrapper[4774]: I1121 14:57:21.916506 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:21 crc kubenswrapper[4774]: I1121 14:57:21.916772 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:21 crc kubenswrapper[4774]: I1121 14:57:21.955278 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:22 crc kubenswrapper[4774]: I1121 14:57:22.166651 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:25 crc kubenswrapper[4774]: I1121 14:57:25.721891 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gvb4x"]
Nov 21 14:57:25 crc kubenswrapper[4774]: I1121 14:57:25.723259 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gvb4x" podUID="a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" containerName="registry-server" containerID="cri-o://fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef" gracePeriod=2
Nov 21 14:57:25 crc kubenswrapper[4774]: I1121 14:57:25.932774 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-78qvm"]
Nov 21 14:57:25 crc kubenswrapper[4774]: I1121 14:57:25.937431 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:25 crc kubenswrapper[4774]: I1121 14:57:25.945662 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-78qvm"]
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.029571 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13167129-5850-4954-9d5c-963ac15cdaf4-catalog-content\") pod \"redhat-marketplace-78qvm\" (UID: \"13167129-5850-4954-9d5c-963ac15cdaf4\") " pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.029663 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsg4f\" (UniqueName: \"kubernetes.io/projected/13167129-5850-4954-9d5c-963ac15cdaf4-kube-api-access-tsg4f\") pod \"redhat-marketplace-78qvm\" (UID: \"13167129-5850-4954-9d5c-963ac15cdaf4\") " pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.029683 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13167129-5850-4954-9d5c-963ac15cdaf4-utilities\") pod \"redhat-marketplace-78qvm\" (UID: \"13167129-5850-4954-9d5c-963ac15cdaf4\") " pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.118665 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.131540 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13167129-5850-4954-9d5c-963ac15cdaf4-catalog-content\") pod \"redhat-marketplace-78qvm\" (UID: \"13167129-5850-4954-9d5c-963ac15cdaf4\") " pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.131650 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsg4f\" (UniqueName: \"kubernetes.io/projected/13167129-5850-4954-9d5c-963ac15cdaf4-kube-api-access-tsg4f\") pod \"redhat-marketplace-78qvm\" (UID: \"13167129-5850-4954-9d5c-963ac15cdaf4\") " pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.131681 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13167129-5850-4954-9d5c-963ac15cdaf4-utilities\") pod \"redhat-marketplace-78qvm\" (UID: \"13167129-5850-4954-9d5c-963ac15cdaf4\") " pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.132313 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13167129-5850-4954-9d5c-963ac15cdaf4-catalog-content\") pod \"redhat-marketplace-78qvm\" (UID: \"13167129-5850-4954-9d5c-963ac15cdaf4\") " pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.132378 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13167129-5850-4954-9d5c-963ac15cdaf4-utilities\") pod \"redhat-marketplace-78qvm\" (UID: \"13167129-5850-4954-9d5c-963ac15cdaf4\") " pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.154753 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsg4f\" (UniqueName: \"kubernetes.io/projected/13167129-5850-4954-9d5c-963ac15cdaf4-kube-api-access-tsg4f\") pod \"redhat-marketplace-78qvm\" (UID: \"13167129-5850-4954-9d5c-963ac15cdaf4\") " pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.165950 4774 generic.go:334] "Generic (PLEG): container finished" podID="a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" containerID="fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef" exitCode=0
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.166086 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gvb4x"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.166104 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvb4x" event={"ID":"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48","Type":"ContainerDied","Data":"fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef"}
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.166504 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gvb4x" event={"ID":"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48","Type":"ContainerDied","Data":"3930609a86191675479c5a981d1539c03dafffdf5a54e8e017960d56a932ad85"}
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.166547 4774 scope.go:117] "RemoveContainer" containerID="fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.199712 4774 scope.go:117] "RemoveContainer" containerID="8b46417ad55210f4f32fe1a21c5553a5fa97c30e7caf95285ec70028ce77db68"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.219438 4774 scope.go:117] "RemoveContainer" containerID="07ff6e9a51f7457b2c20d9e45d9f9cac9886957acc8ee43b70d361d3fd0b0c62"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.232350 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-utilities\") pod \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\" (UID: \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\") "
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.232461 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpvbf\" (UniqueName: \"kubernetes.io/projected/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-kube-api-access-gpvbf\") pod \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\" (UID: \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\") "
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.232499 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-catalog-content\") pod \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\" (UID: \"a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48\") "
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.233598 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-utilities" (OuterVolumeSpecName: "utilities") pod "a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" (UID: "a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.237476 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-kube-api-access-gpvbf" (OuterVolumeSpecName: "kube-api-access-gpvbf") pod "a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" (UID: "a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48"). InnerVolumeSpecName "kube-api-access-gpvbf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.250057 4774 scope.go:117] "RemoveContainer" containerID="fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef"
Nov 21 14:57:26 crc kubenswrapper[4774]: E1121 14:57:26.250448 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef\": container with ID starting with fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef not found: ID does not exist" containerID="fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.250477 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef"} err="failed to get container status \"fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef\": rpc error: code = NotFound desc = could not find container \"fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef\": container with ID starting with fad479a4f14730e1c0fa9190585cdc8e7f4706bb372f4ef3199895e8612c4cef not found: ID does not exist"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.250503 4774 scope.go:117] "RemoveContainer" containerID="8b46417ad55210f4f32fe1a21c5553a5fa97c30e7caf95285ec70028ce77db68"
Nov 21 14:57:26 crc kubenswrapper[4774]: E1121 14:57:26.250721 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b46417ad55210f4f32fe1a21c5553a5fa97c30e7caf95285ec70028ce77db68\": container with ID starting with 8b46417ad55210f4f32fe1a21c5553a5fa97c30e7caf95285ec70028ce77db68 not found: ID does not exist" containerID="8b46417ad55210f4f32fe1a21c5553a5fa97c30e7caf95285ec70028ce77db68"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.250745 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b46417ad55210f4f32fe1a21c5553a5fa97c30e7caf95285ec70028ce77db68"} err="failed to get container status \"8b46417ad55210f4f32fe1a21c5553a5fa97c30e7caf95285ec70028ce77db68\": rpc error: code = NotFound desc = could not find container \"8b46417ad55210f4f32fe1a21c5553a5fa97c30e7caf95285ec70028ce77db68\": container with ID starting with 8b46417ad55210f4f32fe1a21c5553a5fa97c30e7caf95285ec70028ce77db68 not found: ID does not exist"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.250760 4774 scope.go:117] "RemoveContainer" containerID="07ff6e9a51f7457b2c20d9e45d9f9cac9886957acc8ee43b70d361d3fd0b0c62"
Nov 21 14:57:26 crc kubenswrapper[4774]: E1121 14:57:26.251053 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07ff6e9a51f7457b2c20d9e45d9f9cac9886957acc8ee43b70d361d3fd0b0c62\": container with ID starting with 07ff6e9a51f7457b2c20d9e45d9f9cac9886957acc8ee43b70d361d3fd0b0c62 not found: ID does not exist" containerID="07ff6e9a51f7457b2c20d9e45d9f9cac9886957acc8ee43b70d361d3fd0b0c62"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.251099 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07ff6e9a51f7457b2c20d9e45d9f9cac9886957acc8ee43b70d361d3fd0b0c62"} err="failed to get container status \"07ff6e9a51f7457b2c20d9e45d9f9cac9886957acc8ee43b70d361d3fd0b0c62\": rpc error: code = NotFound desc = could not find container \"07ff6e9a51f7457b2c20d9e45d9f9cac9886957acc8ee43b70d361d3fd0b0c62\": container with ID starting with 07ff6e9a51f7457b2c20d9e45d9f9cac9886957acc8ee43b70d361d3fd0b0c62 not found: ID does not exist"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.273234 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.288714 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" (UID: "a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.334531 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpvbf\" (UniqueName: \"kubernetes.io/projected/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-kube-api-access-gpvbf\") on node \"crc\" DevicePath \"\""
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.334579 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.334592 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.513167 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gvb4x"]
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.521019 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gvb4x"]
Nov 21 14:57:26 crc kubenswrapper[4774]: I1121 14:57:26.751448 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-78qvm"]
Nov 21 14:57:27 crc kubenswrapper[4774]: I1121 14:57:27.178071 4774 generic.go:334] "Generic (PLEG): container finished" podID="13167129-5850-4954-9d5c-963ac15cdaf4" containerID="bc591033ddb62f1b05760a4987cd16f32c87742de4bc9262fe302d25ad647f77" exitCode=0
Nov 21 14:57:27 crc kubenswrapper[4774]: I1121 14:57:27.178159 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-78qvm" event={"ID":"13167129-5850-4954-9d5c-963ac15cdaf4","Type":"ContainerDied","Data":"bc591033ddb62f1b05760a4987cd16f32c87742de4bc9262fe302d25ad647f77"}
Nov 21 14:57:27 crc kubenswrapper[4774]: I1121 14:57:27.178561 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-78qvm" event={"ID":"13167129-5850-4954-9d5c-963ac15cdaf4","Type":"ContainerStarted","Data":"1fbd3cb991ff5a3194db29bc786a24b1714e05352977bae775453811e4e762e8"}
Nov 21 14:57:28 crc kubenswrapper[4774]: I1121 14:57:28.105795 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" path="/var/lib/kubelet/pods/a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48/volumes"
Nov 21 14:57:28 crc kubenswrapper[4774]: I1121 14:57:28.191650 4774 generic.go:334] "Generic (PLEG): container finished" podID="13167129-5850-4954-9d5c-963ac15cdaf4" containerID="65e7360c23651d93f2fdd3dbafd5d3ecd201552a51829d3737874d5751cb6457" exitCode=0
Nov 21 14:57:28 crc kubenswrapper[4774]: I1121 14:57:28.191703 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-78qvm" event={"ID":"13167129-5850-4954-9d5c-963ac15cdaf4","Type":"ContainerDied","Data":"65e7360c23651d93f2fdd3dbafd5d3ecd201552a51829d3737874d5751cb6457"}
Nov 21 14:57:29 crc kubenswrapper[4774]: I1121 14:57:29.201967 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-78qvm" event={"ID":"13167129-5850-4954-9d5c-963ac15cdaf4","Type":"ContainerStarted","Data":"683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6"}
Nov 21 14:57:29 crc kubenswrapper[4774]: I1121 14:57:29.224080 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-78qvm" podStartSLOduration=2.812649404 podStartE2EDuration="4.224056689s" podCreationTimestamp="2025-11-21 14:57:25 +0000 UTC" firstStartedPulling="2025-11-21 14:57:27.179753249 +0000 UTC m=+3237.831952508" lastFinishedPulling="2025-11-21 14:57:28.591160534 +0000 UTC m=+3239.243359793" observedRunningTime="2025-11-21 14:57:29.222940627 +0000 UTC m=+3239.875139896" watchObservedRunningTime="2025-11-21 14:57:29.224056689 +0000 UTC m=+3239.876255948"
Nov 21 14:57:31 crc kubenswrapper[4774]: I1121 14:57:31.092758 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:57:31 crc kubenswrapper[4774]: E1121 14:57:31.093465 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.332580 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4nn8m"]
Nov 21 14:57:32 crc kubenswrapper[4774]: E1121 14:57:32.333038 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" containerName="extract-content"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.333498 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" containerName="extract-content"
Nov 21 14:57:32 crc kubenswrapper[4774]: E1121 14:57:32.333546 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" containerName="extract-utilities"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.333557 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" containerName="extract-utilities"
Nov 21 14:57:32 crc kubenswrapper[4774]: E1121 14:57:32.333571 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" containerName="registry-server"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.333578 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" containerName="registry-server"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.333779 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1c78c9a-db0e-4fd1-81f0-2a4e4be0ab48" containerName="registry-server"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.335094 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.343884 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4nn8m"]
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.424669 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjsl4\" (UniqueName: \"kubernetes.io/projected/4c9a245f-8926-4225-a267-5cf0c3b5714f-kube-api-access-qjsl4\") pod \"certified-operators-4nn8m\" (UID: \"4c9a245f-8926-4225-a267-5cf0c3b5714f\") " pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.424730 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c9a245f-8926-4225-a267-5cf0c3b5714f-utilities\") pod \"certified-operators-4nn8m\" (UID: \"4c9a245f-8926-4225-a267-5cf0c3b5714f\") " pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.424757 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c9a245f-8926-4225-a267-5cf0c3b5714f-catalog-content\") pod \"certified-operators-4nn8m\" (UID: \"4c9a245f-8926-4225-a267-5cf0c3b5714f\") " pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.526895 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjsl4\" (UniqueName: \"kubernetes.io/projected/4c9a245f-8926-4225-a267-5cf0c3b5714f-kube-api-access-qjsl4\") pod \"certified-operators-4nn8m\" (UID: \"4c9a245f-8926-4225-a267-5cf0c3b5714f\") " pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.526995 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c9a245f-8926-4225-a267-5cf0c3b5714f-utilities\") pod \"certified-operators-4nn8m\" (UID: \"4c9a245f-8926-4225-a267-5cf0c3b5714f\") " pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.527024 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c9a245f-8926-4225-a267-5cf0c3b5714f-catalog-content\") pod \"certified-operators-4nn8m\" (UID: \"4c9a245f-8926-4225-a267-5cf0c3b5714f\") " pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.527590 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c9a245f-8926-4225-a267-5cf0c3b5714f-catalog-content\") pod \"certified-operators-4nn8m\" (UID: \"4c9a245f-8926-4225-a267-5cf0c3b5714f\") " pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.527706 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c9a245f-8926-4225-a267-5cf0c3b5714f-utilities\") pod \"certified-operators-4nn8m\" (UID: \"4c9a245f-8926-4225-a267-5cf0c3b5714f\") " pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.559187 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjsl4\" (UniqueName: \"kubernetes.io/projected/4c9a245f-8926-4225-a267-5cf0c3b5714f-kube-api-access-qjsl4\") pod \"certified-operators-4nn8m\" (UID: \"4c9a245f-8926-4225-a267-5cf0c3b5714f\") " pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:32 crc kubenswrapper[4774]: I1121 14:57:32.696652 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:33 crc kubenswrapper[4774]: I1121 14:57:33.197829 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4nn8m"]
Nov 21 14:57:33 crc kubenswrapper[4774]: I1121 14:57:33.229562 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4nn8m" event={"ID":"4c9a245f-8926-4225-a267-5cf0c3b5714f","Type":"ContainerStarted","Data":"46eefcd9227c376a806f44f42e793ed69f9893877371cf9dae22ac55b11f63c3"}
Nov 21 14:57:34 crc kubenswrapper[4774]: I1121 14:57:34.241474 4774 generic.go:334] "Generic (PLEG): container finished" podID="4c9a245f-8926-4225-a267-5cf0c3b5714f" containerID="2ef1b1923aee10b4b0fc9a64a7bfd453c193990f862b4662a0bbdb294575c8cc" exitCode=0
Nov 21 14:57:34 crc kubenswrapper[4774]: I1121 14:57:34.241547 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4nn8m" event={"ID":"4c9a245f-8926-4225-a267-5cf0c3b5714f","Type":"ContainerDied","Data":"2ef1b1923aee10b4b0fc9a64a7bfd453c193990f862b4662a0bbdb294575c8cc"}
Nov 21 14:57:35 crc kubenswrapper[4774]: I1121 14:57:35.253711 4774 generic.go:334] "Generic (PLEG): container finished" podID="4c9a245f-8926-4225-a267-5cf0c3b5714f" containerID="b04d432ac760426328de5c0d4b628b0de02247617134380077a8bc8a64f454dd" exitCode=0
Nov 21 14:57:35 crc kubenswrapper[4774]: I1121 14:57:35.253828 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4nn8m" event={"ID":"4c9a245f-8926-4225-a267-5cf0c3b5714f","Type":"ContainerDied","Data":"b04d432ac760426328de5c0d4b628b0de02247617134380077a8bc8a64f454dd"}
Nov 21 14:57:36 crc kubenswrapper[4774]: I1121 14:57:36.274283 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:36 crc kubenswrapper[4774]: I1121 14:57:36.274726 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:36 crc kubenswrapper[4774]: I1121 14:57:36.326717 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:37 crc kubenswrapper[4774]: I1121 14:57:37.271944 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4nn8m" event={"ID":"4c9a245f-8926-4225-a267-5cf0c3b5714f","Type":"ContainerStarted","Data":"4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82"}
Nov 21 14:57:37 crc kubenswrapper[4774]: I1121 14:57:37.298535 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4nn8m" podStartSLOduration=3.334074441 podStartE2EDuration="5.298501496s" podCreationTimestamp="2025-11-21 14:57:32 +0000 UTC" firstStartedPulling="2025-11-21 14:57:34.244724735 +0000 UTC m=+3244.896924014" lastFinishedPulling="2025-11-21 14:57:36.20915181 +0000 UTC m=+3246.861351069" observedRunningTime="2025-11-21 14:57:37.291530538 +0000 UTC m=+3247.943729797" watchObservedRunningTime="2025-11-21 14:57:37.298501496 +0000 UTC m=+3247.950700755"
Nov 21 14:57:37 crc kubenswrapper[4774]: I1121 14:57:37.322294 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:39 crc kubenswrapper[4774]: I1121 14:57:39.524869 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-78qvm"]
Nov 21 14:57:39 crc kubenswrapper[4774]: I1121 14:57:39.525595 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-78qvm" podUID="13167129-5850-4954-9d5c-963ac15cdaf4" containerName="registry-server" containerID="cri-o://683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6" gracePeriod=2
Nov 21 14:57:39 crc kubenswrapper[4774]: I1121 14:57:39.921454 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:39 crc kubenswrapper[4774]: I1121 14:57:39.951403 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13167129-5850-4954-9d5c-963ac15cdaf4-utilities\") pod \"13167129-5850-4954-9d5c-963ac15cdaf4\" (UID: \"13167129-5850-4954-9d5c-963ac15cdaf4\") "
Nov 21 14:57:39 crc kubenswrapper[4774]: I1121 14:57:39.951573 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsg4f\" (UniqueName: \"kubernetes.io/projected/13167129-5850-4954-9d5c-963ac15cdaf4-kube-api-access-tsg4f\") pod \"13167129-5850-4954-9d5c-963ac15cdaf4\" (UID: \"13167129-5850-4954-9d5c-963ac15cdaf4\") "
Nov 21 14:57:39 crc kubenswrapper[4774]: I1121 14:57:39.951633 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13167129-5850-4954-9d5c-963ac15cdaf4-catalog-content\") pod \"13167129-5850-4954-9d5c-963ac15cdaf4\" (UID: \"13167129-5850-4954-9d5c-963ac15cdaf4\") "
Nov 21 14:57:39 crc kubenswrapper[4774]: I1121 14:57:39.952884 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13167129-5850-4954-9d5c-963ac15cdaf4-utilities" (OuterVolumeSpecName: "utilities") pod "13167129-5850-4954-9d5c-963ac15cdaf4" (UID: "13167129-5850-4954-9d5c-963ac15cdaf4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:57:39 crc kubenswrapper[4774]: I1121 14:57:39.961335 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13167129-5850-4954-9d5c-963ac15cdaf4-kube-api-access-tsg4f" (OuterVolumeSpecName: "kube-api-access-tsg4f") pod "13167129-5850-4954-9d5c-963ac15cdaf4" (UID: "13167129-5850-4954-9d5c-963ac15cdaf4"). InnerVolumeSpecName "kube-api-access-tsg4f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:57:39 crc kubenswrapper[4774]: I1121 14:57:39.975849 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13167129-5850-4954-9d5c-963ac15cdaf4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "13167129-5850-4954-9d5c-963ac15cdaf4" (UID: "13167129-5850-4954-9d5c-963ac15cdaf4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.053575 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/13167129-5850-4954-9d5c-963ac15cdaf4-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.053605 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsg4f\" (UniqueName: \"kubernetes.io/projected/13167129-5850-4954-9d5c-963ac15cdaf4-kube-api-access-tsg4f\") on node \"crc\" DevicePath \"\""
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.053615 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/13167129-5850-4954-9d5c-963ac15cdaf4-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.299099 4774 generic.go:334] "Generic (PLEG): container finished" podID="13167129-5850-4954-9d5c-963ac15cdaf4" containerID="683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6" exitCode=0
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.299167 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-78qvm" event={"ID":"13167129-5850-4954-9d5c-963ac15cdaf4","Type":"ContainerDied","Data":"683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6"}
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.299189 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-78qvm"
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.299223 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-78qvm" event={"ID":"13167129-5850-4954-9d5c-963ac15cdaf4","Type":"ContainerDied","Data":"1fbd3cb991ff5a3194db29bc786a24b1714e05352977bae775453811e4e762e8"}
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.299292 4774 scope.go:117] "RemoveContainer" containerID="683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6"
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.326358 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-78qvm"]
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.329804 4774 scope.go:117] "RemoveContainer" containerID="65e7360c23651d93f2fdd3dbafd5d3ecd201552a51829d3737874d5751cb6457"
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.331738 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-78qvm"]
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.349160 4774 scope.go:117] "RemoveContainer" containerID="bc591033ddb62f1b05760a4987cd16f32c87742de4bc9262fe302d25ad647f77"
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.373235 4774 scope.go:117] "RemoveContainer" containerID="683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6"
Nov 21 14:57:40 crc kubenswrapper[4774]: E1121 14:57:40.373734 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6\": container with ID starting with 683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6 not found: ID does not exist" containerID="683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6"
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.373768 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6"} err="failed to get container status \"683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6\": rpc error: code = NotFound desc = could not find container \"683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6\": container with ID starting with 683659b3c5585ba8c60fa58dce92579b8281fa65ba48fabaef715a3d587a72a6 not found: ID does not exist"
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.373792 4774 scope.go:117] "RemoveContainer" containerID="65e7360c23651d93f2fdd3dbafd5d3ecd201552a51829d3737874d5751cb6457"
Nov 21 14:57:40 crc kubenswrapper[4774]: E1121 14:57:40.374257 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65e7360c23651d93f2fdd3dbafd5d3ecd201552a51829d3737874d5751cb6457\": container with ID starting with 65e7360c23651d93f2fdd3dbafd5d3ecd201552a51829d3737874d5751cb6457 not found: ID does not exist" containerID="65e7360c23651d93f2fdd3dbafd5d3ecd201552a51829d3737874d5751cb6457"
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.374283 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65e7360c23651d93f2fdd3dbafd5d3ecd201552a51829d3737874d5751cb6457"} err="failed to get container status \"65e7360c23651d93f2fdd3dbafd5d3ecd201552a51829d3737874d5751cb6457\": rpc error: code = NotFound desc = could not find container \"65e7360c23651d93f2fdd3dbafd5d3ecd201552a51829d3737874d5751cb6457\": container with ID starting with 65e7360c23651d93f2fdd3dbafd5d3ecd201552a51829d3737874d5751cb6457 not found: ID does not exist"
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.374296 4774 scope.go:117] "RemoveContainer" containerID="bc591033ddb62f1b05760a4987cd16f32c87742de4bc9262fe302d25ad647f77"
Nov 21 14:57:40 crc kubenswrapper[4774]: E1121 14:57:40.374660 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc591033ddb62f1b05760a4987cd16f32c87742de4bc9262fe302d25ad647f77\": container with ID starting with bc591033ddb62f1b05760a4987cd16f32c87742de4bc9262fe302d25ad647f77 not found: ID does not exist" containerID="bc591033ddb62f1b05760a4987cd16f32c87742de4bc9262fe302d25ad647f77"
Nov 21 14:57:40 crc kubenswrapper[4774]: I1121 14:57:40.374686 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc591033ddb62f1b05760a4987cd16f32c87742de4bc9262fe302d25ad647f77"} err="failed to get container status \"bc591033ddb62f1b05760a4987cd16f32c87742de4bc9262fe302d25ad647f77\": rpc error: code = NotFound desc = could not find container \"bc591033ddb62f1b05760a4987cd16f32c87742de4bc9262fe302d25ad647f77\": container with ID starting with bc591033ddb62f1b05760a4987cd16f32c87742de4bc9262fe302d25ad647f77 not found: ID does not exist"
Nov 21 14:57:42 crc kubenswrapper[4774]: I1121 14:57:42.113994 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13167129-5850-4954-9d5c-963ac15cdaf4" path="/var/lib/kubelet/pods/13167129-5850-4954-9d5c-963ac15cdaf4/volumes"
Nov 21 14:57:42 crc kubenswrapper[4774]: I1121 14:57:42.697462 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:42 crc kubenswrapper[4774]: I1121 14:57:42.697548 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:42 crc kubenswrapper[4774]: I1121 14:57:42.747193 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:43 crc kubenswrapper[4774]: I1121 14:57:43.094102 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:57:43 crc kubenswrapper[4774]: E1121 14:57:43.094598 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:57:43 crc kubenswrapper[4774]: I1121 14:57:43.378111 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:43 crc kubenswrapper[4774]: I1121 14:57:43.922733 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4nn8m"]
Nov 21 14:57:45 crc kubenswrapper[4774]: I1121 14:57:45.344379 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4nn8m" podUID="4c9a245f-8926-4225-a267-5cf0c3b5714f" containerName="registry-server" containerID="cri-o://4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82" gracePeriod=2
Nov 21 14:57:45 crc kubenswrapper[4774]: I1121 14:57:45.746660 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:45 crc kubenswrapper[4774]: I1121 14:57:45.848200 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c9a245f-8926-4225-a267-5cf0c3b5714f-catalog-content\") pod \"4c9a245f-8926-4225-a267-5cf0c3b5714f\" (UID: \"4c9a245f-8926-4225-a267-5cf0c3b5714f\") "
Nov 21 14:57:45 crc kubenswrapper[4774]: I1121 14:57:45.848440 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c9a245f-8926-4225-a267-5cf0c3b5714f-utilities\") pod \"4c9a245f-8926-4225-a267-5cf0c3b5714f\" (UID: \"4c9a245f-8926-4225-a267-5cf0c3b5714f\") "
Nov 21 14:57:45 crc kubenswrapper[4774]: I1121 14:57:45.848515 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjsl4\" (UniqueName: \"kubernetes.io/projected/4c9a245f-8926-4225-a267-5cf0c3b5714f-kube-api-access-qjsl4\") pod \"4c9a245f-8926-4225-a267-5cf0c3b5714f\" (UID: \"4c9a245f-8926-4225-a267-5cf0c3b5714f\") "
Nov 21 14:57:45 crc kubenswrapper[4774]: I1121 14:57:45.849590 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c9a245f-8926-4225-a267-5cf0c3b5714f-utilities" (OuterVolumeSpecName: "utilities") pod "4c9a245f-8926-4225-a267-5cf0c3b5714f" (UID: "4c9a245f-8926-4225-a267-5cf0c3b5714f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:57:45 crc kubenswrapper[4774]: I1121 14:57:45.856611 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c9a245f-8926-4225-a267-5cf0c3b5714f-kube-api-access-qjsl4" (OuterVolumeSpecName: "kube-api-access-qjsl4") pod "4c9a245f-8926-4225-a267-5cf0c3b5714f" (UID: "4c9a245f-8926-4225-a267-5cf0c3b5714f"). InnerVolumeSpecName "kube-api-access-qjsl4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 14:57:45 crc kubenswrapper[4774]: I1121 14:57:45.909307 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c9a245f-8926-4225-a267-5cf0c3b5714f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4c9a245f-8926-4225-a267-5cf0c3b5714f" (UID: "4c9a245f-8926-4225-a267-5cf0c3b5714f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 14:57:45 crc kubenswrapper[4774]: I1121 14:57:45.952890 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c9a245f-8926-4225-a267-5cf0c3b5714f-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 14:57:45 crc kubenswrapper[4774]: I1121 14:57:45.952985 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjsl4\" (UniqueName: \"kubernetes.io/projected/4c9a245f-8926-4225-a267-5cf0c3b5714f-kube-api-access-qjsl4\") on node \"crc\" DevicePath \"\""
Nov 21 14:57:45 crc kubenswrapper[4774]: I1121 14:57:45.953009 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c9a245f-8926-4225-a267-5cf0c3b5714f-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.353602 4774 generic.go:334] "Generic (PLEG): container finished" podID="4c9a245f-8926-4225-a267-5cf0c3b5714f" containerID="4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82" exitCode=0
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.353651 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4nn8m"
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.353650 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4nn8m" event={"ID":"4c9a245f-8926-4225-a267-5cf0c3b5714f","Type":"ContainerDied","Data":"4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82"}
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.353797 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4nn8m" event={"ID":"4c9a245f-8926-4225-a267-5cf0c3b5714f","Type":"ContainerDied","Data":"46eefcd9227c376a806f44f42e793ed69f9893877371cf9dae22ac55b11f63c3"}
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.353875 4774 scope.go:117] "RemoveContainer" containerID="4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82"
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.374721 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4nn8m"]
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.375066 4774 scope.go:117] "RemoveContainer" containerID="b04d432ac760426328de5c0d4b628b0de02247617134380077a8bc8a64f454dd"
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.389376 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4nn8m"]
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.404299 4774 scope.go:117] "RemoveContainer" containerID="2ef1b1923aee10b4b0fc9a64a7bfd453c193990f862b4662a0bbdb294575c8cc"
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.421105 4774 scope.go:117] "RemoveContainer" containerID="4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82"
Nov 21 14:57:46 crc kubenswrapper[4774]: E1121 14:57:46.421690 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82\": container with ID starting with 4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82 not found: ID does not exist" containerID="4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82"
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.421740 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82"} err="failed to get container status \"4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82\": rpc error: code = NotFound desc = could not find container \"4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82\": container with ID starting with 4b61adade0d26194d4cd20c1a2cabd196f7d411ca882697f44841590d4fd7c82 not found: ID does not exist"
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.421765 4774 scope.go:117] "RemoveContainer" containerID="b04d432ac760426328de5c0d4b628b0de02247617134380077a8bc8a64f454dd"
Nov 21 14:57:46 crc kubenswrapper[4774]: E1121 14:57:46.422085 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b04d432ac760426328de5c0d4b628b0de02247617134380077a8bc8a64f454dd\": container with ID starting with b04d432ac760426328de5c0d4b628b0de02247617134380077a8bc8a64f454dd not found: ID does not exist" containerID="b04d432ac760426328de5c0d4b628b0de02247617134380077a8bc8a64f454dd"
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.422165 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b04d432ac760426328de5c0d4b628b0de02247617134380077a8bc8a64f454dd"} err="failed to get container status \"b04d432ac760426328de5c0d4b628b0de02247617134380077a8bc8a64f454dd\": rpc error: code = NotFound desc = could not find container \"b04d432ac760426328de5c0d4b628b0de02247617134380077a8bc8a64f454dd\": container with ID starting with b04d432ac760426328de5c0d4b628b0de02247617134380077a8bc8a64f454dd not found: ID does not exist"
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.422205 4774 scope.go:117] "RemoveContainer" containerID="2ef1b1923aee10b4b0fc9a64a7bfd453c193990f862b4662a0bbdb294575c8cc"
Nov 21 14:57:46 crc kubenswrapper[4774]: E1121 14:57:46.422571 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ef1b1923aee10b4b0fc9a64a7bfd453c193990f862b4662a0bbdb294575c8cc\": container with ID starting with 2ef1b1923aee10b4b0fc9a64a7bfd453c193990f862b4662a0bbdb294575c8cc not found: ID does not exist" containerID="2ef1b1923aee10b4b0fc9a64a7bfd453c193990f862b4662a0bbdb294575c8cc"
Nov 21 14:57:46 crc kubenswrapper[4774]: I1121 14:57:46.422611 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ef1b1923aee10b4b0fc9a64a7bfd453c193990f862b4662a0bbdb294575c8cc"} err="failed to get container status \"2ef1b1923aee10b4b0fc9a64a7bfd453c193990f862b4662a0bbdb294575c8cc\": rpc error: code = NotFound desc = could not find container \"2ef1b1923aee10b4b0fc9a64a7bfd453c193990f862b4662a0bbdb294575c8cc\": container with ID starting with 2ef1b1923aee10b4b0fc9a64a7bfd453c193990f862b4662a0bbdb294575c8cc not found: ID does not exist"
Nov 21 14:57:48 crc kubenswrapper[4774]: I1121 14:57:48.104145 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c9a245f-8926-4225-a267-5cf0c3b5714f" path="/var/lib/kubelet/pods/4c9a245f-8926-4225-a267-5cf0c3b5714f/volumes"
Nov 21 14:57:58 crc kubenswrapper[4774]: I1121 14:57:58.094423 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:57:58 crc kubenswrapper[4774]: E1121 14:57:58.095023 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:58:12 crc kubenswrapper[4774]: I1121 14:58:12.093297 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:58:12 crc kubenswrapper[4774]: E1121 14:58:12.094074 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:58:23 crc kubenswrapper[4774]: I1121 14:58:23.093675 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:58:23 crc kubenswrapper[4774]: E1121 14:58:23.094706 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:58:37 crc kubenswrapper[4774]: I1121 14:58:37.093806 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:58:37 crc kubenswrapper[4774]: E1121 14:58:37.094974 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:58:50 crc kubenswrapper[4774]: I1121 14:58:50.097490 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107"
Nov 21 14:58:50 crc kubenswrapper[4774]: E1121 14:58:50.098351 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.276212 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rl8lx"]
Nov 21 14:58:52 crc kubenswrapper[4774]: E1121 14:58:52.276889 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13167129-5850-4954-9d5c-963ac15cdaf4" containerName="extract-utilities"
Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.276905 4774 state_mem.go:107] "Deleted CPUSet assignment"
podUID="13167129-5850-4954-9d5c-963ac15cdaf4" containerName="extract-utilities" Nov 21 14:58:52 crc kubenswrapper[4774]: E1121 14:58:52.276926 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c9a245f-8926-4225-a267-5cf0c3b5714f" containerName="extract-content" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.276935 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c9a245f-8926-4225-a267-5cf0c3b5714f" containerName="extract-content" Nov 21 14:58:52 crc kubenswrapper[4774]: E1121 14:58:52.276955 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13167129-5850-4954-9d5c-963ac15cdaf4" containerName="registry-server" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.276965 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="13167129-5850-4954-9d5c-963ac15cdaf4" containerName="registry-server" Nov 21 14:58:52 crc kubenswrapper[4774]: E1121 14:58:52.276978 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c9a245f-8926-4225-a267-5cf0c3b5714f" containerName="extract-utilities" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.276988 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c9a245f-8926-4225-a267-5cf0c3b5714f" containerName="extract-utilities" Nov 21 14:58:52 crc kubenswrapper[4774]: E1121 14:58:52.277006 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c9a245f-8926-4225-a267-5cf0c3b5714f" containerName="registry-server" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.277016 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c9a245f-8926-4225-a267-5cf0c3b5714f" containerName="registry-server" Nov 21 14:58:52 crc kubenswrapper[4774]: E1121 14:58:52.277034 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13167129-5850-4954-9d5c-963ac15cdaf4" containerName="extract-content" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.277042 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="13167129-5850-4954-9d5c-963ac15cdaf4" containerName="extract-content" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.277219 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="13167129-5850-4954-9d5c-963ac15cdaf4" containerName="registry-server" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.277236 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c9a245f-8926-4225-a267-5cf0c3b5714f" containerName="registry-server" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.278680 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.295079 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rl8lx"] Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.320039 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvrkx\" (UniqueName: \"kubernetes.io/projected/549b0ee3-cc17-43ff-8537-39d03185ef57-kube-api-access-zvrkx\") pod \"redhat-operators-rl8lx\" (UID: \"549b0ee3-cc17-43ff-8537-39d03185ef57\") " pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.320138 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/549b0ee3-cc17-43ff-8537-39d03185ef57-utilities\") pod \"redhat-operators-rl8lx\" (UID: \"549b0ee3-cc17-43ff-8537-39d03185ef57\") " pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.320183 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/549b0ee3-cc17-43ff-8537-39d03185ef57-catalog-content\") pod \"redhat-operators-rl8lx\" (UID: \"549b0ee3-cc17-43ff-8537-39d03185ef57\") " pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.421920 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/549b0ee3-cc17-43ff-8537-39d03185ef57-utilities\") pod \"redhat-operators-rl8lx\" (UID: \"549b0ee3-cc17-43ff-8537-39d03185ef57\") " pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.422012 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/549b0ee3-cc17-43ff-8537-39d03185ef57-catalog-content\") pod \"redhat-operators-rl8lx\" (UID: \"549b0ee3-cc17-43ff-8537-39d03185ef57\") " pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.422118 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvrkx\" (UniqueName: \"kubernetes.io/projected/549b0ee3-cc17-43ff-8537-39d03185ef57-kube-api-access-zvrkx\") pod \"redhat-operators-rl8lx\" (UID: \"549b0ee3-cc17-43ff-8537-39d03185ef57\") " pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.422655 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/549b0ee3-cc17-43ff-8537-39d03185ef57-catalog-content\") pod \"redhat-operators-rl8lx\" (UID: \"549b0ee3-cc17-43ff-8537-39d03185ef57\") " pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.422683 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/549b0ee3-cc17-43ff-8537-39d03185ef57-utilities\") pod \"redhat-operators-rl8lx\" (UID: \"549b0ee3-cc17-43ff-8537-39d03185ef57\") " pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.442940 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-zvrkx\" (UniqueName: \"kubernetes.io/projected/549b0ee3-cc17-43ff-8537-39d03185ef57-kube-api-access-zvrkx\") pod \"redhat-operators-rl8lx\" (UID: \"549b0ee3-cc17-43ff-8537-39d03185ef57\") " pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:58:52 crc kubenswrapper[4774]: I1121 14:58:52.603215 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:58:53 crc kubenswrapper[4774]: I1121 14:58:53.066903 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rl8lx"] Nov 21 14:58:53 crc kubenswrapper[4774]: I1121 14:58:53.876749 4774 generic.go:334] "Generic (PLEG): container finished" podID="549b0ee3-cc17-43ff-8537-39d03185ef57" containerID="74b21003053bfc40c4950c72a36161f2328ae3bf6528395dd45e8830346c0151" exitCode=0 Nov 21 14:58:53 crc kubenswrapper[4774]: I1121 14:58:53.876806 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl8lx" event={"ID":"549b0ee3-cc17-43ff-8537-39d03185ef57","Type":"ContainerDied","Data":"74b21003053bfc40c4950c72a36161f2328ae3bf6528395dd45e8830346c0151"} Nov 21 14:58:53 crc kubenswrapper[4774]: I1121 14:58:53.877153 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl8lx" event={"ID":"549b0ee3-cc17-43ff-8537-39d03185ef57","Type":"ContainerStarted","Data":"a9a53cb07fa43fa3b236bfc40a0fb4ac3d2431ea58c0ab265252fa93b78861cf"} Nov 21 14:58:54 crc kubenswrapper[4774]: I1121 14:58:54.887424 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl8lx" event={"ID":"549b0ee3-cc17-43ff-8537-39d03185ef57","Type":"ContainerStarted","Data":"dc8e27eca1bc930b6956ccb29fa921096b737044fceb00320de04533581e073d"} Nov 21 14:58:55 crc kubenswrapper[4774]: I1121 14:58:55.896383 4774 generic.go:334] "Generic (PLEG): container finished" podID="549b0ee3-cc17-43ff-8537-39d03185ef57" containerID="dc8e27eca1bc930b6956ccb29fa921096b737044fceb00320de04533581e073d" exitCode=0 Nov 21 14:58:55 crc kubenswrapper[4774]: I1121 14:58:55.896458 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl8lx" event={"ID":"549b0ee3-cc17-43ff-8537-39d03185ef57","Type":"ContainerDied","Data":"dc8e27eca1bc930b6956ccb29fa921096b737044fceb00320de04533581e073d"} Nov 21 14:58:56 crc kubenswrapper[4774]: I1121 14:58:56.905281 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl8lx" event={"ID":"549b0ee3-cc17-43ff-8537-39d03185ef57","Type":"ContainerStarted","Data":"599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878"} Nov 21 14:58:56 crc kubenswrapper[4774]: I1121 14:58:56.928253 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rl8lx" podStartSLOduration=2.519408405 podStartE2EDuration="4.928196124s" podCreationTimestamp="2025-11-21 14:58:52 +0000 UTC" firstStartedPulling="2025-11-21 14:58:53.879836002 +0000 UTC m=+3324.532035261" lastFinishedPulling="2025-11-21 14:58:56.288623731 +0000 UTC m=+3326.940822980" observedRunningTime="2025-11-21 14:58:56.921451961 +0000 UTC m=+3327.573651250" watchObservedRunningTime="2025-11-21 14:58:56.928196124 +0000 UTC m=+3327.580395403" Nov 21 14:59:02 crc kubenswrapper[4774]: I1121 14:59:02.604436 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rl8lx" 
Nov 21 14:59:02 crc kubenswrapper[4774]: I1121 14:59:02.606021 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:59:02 crc kubenswrapper[4774]: I1121 14:59:02.654904 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:59:03 crc kubenswrapper[4774]: I1121 14:59:03.010921 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:59:03 crc kubenswrapper[4774]: I1121 14:59:03.060706 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rl8lx"] Nov 21 14:59:03 crc kubenswrapper[4774]: I1121 14:59:03.093562 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107" Nov 21 14:59:03 crc kubenswrapper[4774]: I1121 14:59:03.956296 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"e873bbc47c06fda572bd9ffed046734326ec7588fb044d34c1818890f2e11c1f"} Nov 21 14:59:04 crc kubenswrapper[4774]: I1121 14:59:04.965978 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rl8lx" podUID="549b0ee3-cc17-43ff-8537-39d03185ef57" containerName="registry-server" containerID="cri-o://599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878" gracePeriod=2 Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.570564 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.649309 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/549b0ee3-cc17-43ff-8537-39d03185ef57-utilities\") pod \"549b0ee3-cc17-43ff-8537-39d03185ef57\" (UID: \"549b0ee3-cc17-43ff-8537-39d03185ef57\") " Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.649442 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvrkx\" (UniqueName: \"kubernetes.io/projected/549b0ee3-cc17-43ff-8537-39d03185ef57-kube-api-access-zvrkx\") pod \"549b0ee3-cc17-43ff-8537-39d03185ef57\" (UID: \"549b0ee3-cc17-43ff-8537-39d03185ef57\") " Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.649499 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/549b0ee3-cc17-43ff-8537-39d03185ef57-catalog-content\") pod \"549b0ee3-cc17-43ff-8537-39d03185ef57\" (UID: \"549b0ee3-cc17-43ff-8537-39d03185ef57\") " Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.650517 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/549b0ee3-cc17-43ff-8537-39d03185ef57-utilities" (OuterVolumeSpecName: "utilities") pod "549b0ee3-cc17-43ff-8537-39d03185ef57" (UID: "549b0ee3-cc17-43ff-8537-39d03185ef57"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.659735 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/549b0ee3-cc17-43ff-8537-39d03185ef57-kube-api-access-zvrkx" (OuterVolumeSpecName: "kube-api-access-zvrkx") pod "549b0ee3-cc17-43ff-8537-39d03185ef57" (UID: "549b0ee3-cc17-43ff-8537-39d03185ef57"). InnerVolumeSpecName "kube-api-access-zvrkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.745297 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/549b0ee3-cc17-43ff-8537-39d03185ef57-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "549b0ee3-cc17-43ff-8537-39d03185ef57" (UID: "549b0ee3-cc17-43ff-8537-39d03185ef57"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.751862 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvrkx\" (UniqueName: \"kubernetes.io/projected/549b0ee3-cc17-43ff-8537-39d03185ef57-kube-api-access-zvrkx\") on node \"crc\" DevicePath \"\"" Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.751905 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/549b0ee3-cc17-43ff-8537-39d03185ef57-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.751916 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/549b0ee3-cc17-43ff-8537-39d03185ef57-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.984276 4774 generic.go:334] "Generic (PLEG): container finished" podID="549b0ee3-cc17-43ff-8537-39d03185ef57" containerID="599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878" exitCode=0 Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.985074 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl8lx" event={"ID":"549b0ee3-cc17-43ff-8537-39d03185ef57","Type":"ContainerDied","Data":"599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878"} Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.985341 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rl8lx" event={"ID":"549b0ee3-cc17-43ff-8537-39d03185ef57","Type":"ContainerDied","Data":"a9a53cb07fa43fa3b236bfc40a0fb4ac3d2431ea58c0ab265252fa93b78861cf"} Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.985241 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rl8lx" Nov 21 14:59:06 crc kubenswrapper[4774]: I1121 14:59:06.985494 4774 scope.go:117] "RemoveContainer" containerID="599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878" Nov 21 14:59:07 crc kubenswrapper[4774]: I1121 14:59:07.013398 4774 scope.go:117] "RemoveContainer" containerID="dc8e27eca1bc930b6956ccb29fa921096b737044fceb00320de04533581e073d" Nov 21 14:59:07 crc kubenswrapper[4774]: I1121 14:59:07.022398 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rl8lx"] Nov 21 14:59:07 crc kubenswrapper[4774]: I1121 14:59:07.029015 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rl8lx"] Nov 21 14:59:07 crc kubenswrapper[4774]: I1121 14:59:07.034770 4774 scope.go:117] "RemoveContainer" containerID="74b21003053bfc40c4950c72a36161f2328ae3bf6528395dd45e8830346c0151" Nov 21 14:59:07 crc kubenswrapper[4774]: I1121 14:59:07.062492 4774 scope.go:117] "RemoveContainer" containerID="599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878" Nov 21 14:59:07 crc kubenswrapper[4774]: E1121 14:59:07.063489 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878\": container with ID starting with 599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878 not found: ID does not exist" containerID="599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878" Nov 21 14:59:07 crc kubenswrapper[4774]: I1121 14:59:07.063619 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878"} err="failed to get container status \"599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878\": rpc error: code = NotFound desc = could not find container \"599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878\": container with ID starting with 599c2c5ef9c1188525ef6f7aa7b79638a9a665f694154da6ee47bbc3a4a11878 not found: ID does not exist" Nov 21 14:59:07 crc kubenswrapper[4774]: I1121 14:59:07.063734 4774 scope.go:117] "RemoveContainer" containerID="dc8e27eca1bc930b6956ccb29fa921096b737044fceb00320de04533581e073d" Nov 21 14:59:07 crc kubenswrapper[4774]: E1121 14:59:07.064265 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc8e27eca1bc930b6956ccb29fa921096b737044fceb00320de04533581e073d\": container with ID starting with dc8e27eca1bc930b6956ccb29fa921096b737044fceb00320de04533581e073d not found: ID does not exist" containerID="dc8e27eca1bc930b6956ccb29fa921096b737044fceb00320de04533581e073d" Nov 21 14:59:07 crc kubenswrapper[4774]: I1121 14:59:07.064308 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc8e27eca1bc930b6956ccb29fa921096b737044fceb00320de04533581e073d"} err="failed to get container status \"dc8e27eca1bc930b6956ccb29fa921096b737044fceb00320de04533581e073d\": rpc error: code = NotFound desc = could not find container \"dc8e27eca1bc930b6956ccb29fa921096b737044fceb00320de04533581e073d\": container with ID starting with dc8e27eca1bc930b6956ccb29fa921096b737044fceb00320de04533581e073d not found: ID does not exist" Nov 21 14:59:07 crc kubenswrapper[4774]: I1121 14:59:07.064331 4774 scope.go:117] "RemoveContainer" 
containerID="74b21003053bfc40c4950c72a36161f2328ae3bf6528395dd45e8830346c0151" Nov 21 14:59:07 crc kubenswrapper[4774]: E1121 14:59:07.064611 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74b21003053bfc40c4950c72a36161f2328ae3bf6528395dd45e8830346c0151\": container with ID starting with 74b21003053bfc40c4950c72a36161f2328ae3bf6528395dd45e8830346c0151 not found: ID does not exist" containerID="74b21003053bfc40c4950c72a36161f2328ae3bf6528395dd45e8830346c0151" Nov 21 14:59:07 crc kubenswrapper[4774]: I1121 14:59:07.064696 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74b21003053bfc40c4950c72a36161f2328ae3bf6528395dd45e8830346c0151"} err="failed to get container status \"74b21003053bfc40c4950c72a36161f2328ae3bf6528395dd45e8830346c0151\": rpc error: code = NotFound desc = could not find container \"74b21003053bfc40c4950c72a36161f2328ae3bf6528395dd45e8830346c0151\": container with ID starting with 74b21003053bfc40c4950c72a36161f2328ae3bf6528395dd45e8830346c0151 not found: ID does not exist" Nov 21 14:59:08 crc kubenswrapper[4774]: I1121 14:59:08.101963 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="549b0ee3-cc17-43ff-8537-39d03185ef57" path="/var/lib/kubelet/pods/549b0ee3-cc17-43ff-8537-39d03185ef57/volumes" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.197216 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s"] Nov 21 15:00:00 crc kubenswrapper[4774]: E1121 15:00:00.198086 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="549b0ee3-cc17-43ff-8537-39d03185ef57" containerName="extract-content" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.198182 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="549b0ee3-cc17-43ff-8537-39d03185ef57" containerName="extract-content" Nov 21 15:00:00 crc kubenswrapper[4774]: E1121 15:00:00.198225 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="549b0ee3-cc17-43ff-8537-39d03185ef57" containerName="extract-utilities" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.198233 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="549b0ee3-cc17-43ff-8537-39d03185ef57" containerName="extract-utilities" Nov 21 15:00:00 crc kubenswrapper[4774]: E1121 15:00:00.198249 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="549b0ee3-cc17-43ff-8537-39d03185ef57" containerName="registry-server" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.198256 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="549b0ee3-cc17-43ff-8537-39d03185ef57" containerName="registry-server" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.198436 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="549b0ee3-cc17-43ff-8537-39d03185ef57" containerName="registry-server" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.199102 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.202702 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.203103 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.218242 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s"] Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.297656 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/97443f21-9db6-47a2-b6af-d508a06c69a2-config-volume\") pod \"collect-profiles-29395620-dzl5s\" (UID: \"97443f21-9db6-47a2-b6af-d508a06c69a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.297737 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/97443f21-9db6-47a2-b6af-d508a06c69a2-secret-volume\") pod \"collect-profiles-29395620-dzl5s\" (UID: \"97443f21-9db6-47a2-b6af-d508a06c69a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.297777 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzmxp\" (UniqueName: \"kubernetes.io/projected/97443f21-9db6-47a2-b6af-d508a06c69a2-kube-api-access-xzmxp\") pod \"collect-profiles-29395620-dzl5s\" (UID: \"97443f21-9db6-47a2-b6af-d508a06c69a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.400328 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/97443f21-9db6-47a2-b6af-d508a06c69a2-config-volume\") pod \"collect-profiles-29395620-dzl5s\" (UID: \"97443f21-9db6-47a2-b6af-d508a06c69a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.400422 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/97443f21-9db6-47a2-b6af-d508a06c69a2-secret-volume\") pod \"collect-profiles-29395620-dzl5s\" (UID: \"97443f21-9db6-47a2-b6af-d508a06c69a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.400459 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzmxp\" (UniqueName: \"kubernetes.io/projected/97443f21-9db6-47a2-b6af-d508a06c69a2-kube-api-access-xzmxp\") pod \"collect-profiles-29395620-dzl5s\" (UID: \"97443f21-9db6-47a2-b6af-d508a06c69a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.402233 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/97443f21-9db6-47a2-b6af-d508a06c69a2-config-volume\") pod 
\"collect-profiles-29395620-dzl5s\" (UID: \"97443f21-9db6-47a2-b6af-d508a06c69a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.421806 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzmxp\" (UniqueName: \"kubernetes.io/projected/97443f21-9db6-47a2-b6af-d508a06c69a2-kube-api-access-xzmxp\") pod \"collect-profiles-29395620-dzl5s\" (UID: \"97443f21-9db6-47a2-b6af-d508a06c69a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.422192 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/97443f21-9db6-47a2-b6af-d508a06c69a2-secret-volume\") pod \"collect-profiles-29395620-dzl5s\" (UID: \"97443f21-9db6-47a2-b6af-d508a06c69a2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.534629 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:00 crc kubenswrapper[4774]: I1121 15:00:00.998023 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s"] Nov 21 15:00:01 crc kubenswrapper[4774]: I1121 15:00:01.436084 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" event={"ID":"97443f21-9db6-47a2-b6af-d508a06c69a2","Type":"ContainerStarted","Data":"07b4199853d87430a2480cfe58b005b834ea1e8b113e865e7f15d5fdc3fe0ec8"} Nov 21 15:00:01 crc kubenswrapper[4774]: I1121 15:00:01.436568 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" event={"ID":"97443f21-9db6-47a2-b6af-d508a06c69a2","Type":"ContainerStarted","Data":"95fd358ebba3646828dc455ce43481d8a2e7e836f165e61fa2f4eb81a8aada24"} Nov 21 15:00:02 crc kubenswrapper[4774]: I1121 15:00:02.449806 4774 generic.go:334] "Generic (PLEG): container finished" podID="97443f21-9db6-47a2-b6af-d508a06c69a2" containerID="07b4199853d87430a2480cfe58b005b834ea1e8b113e865e7f15d5fdc3fe0ec8" exitCode=0 Nov 21 15:00:02 crc kubenswrapper[4774]: I1121 15:00:02.449891 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" event={"ID":"97443f21-9db6-47a2-b6af-d508a06c69a2","Type":"ContainerDied","Data":"07b4199853d87430a2480cfe58b005b834ea1e8b113e865e7f15d5fdc3fe0ec8"} Nov 21 15:00:03 crc kubenswrapper[4774]: I1121 15:00:03.736420 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:03 crc kubenswrapper[4774]: I1121 15:00:03.851550 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/97443f21-9db6-47a2-b6af-d508a06c69a2-secret-volume\") pod \"97443f21-9db6-47a2-b6af-d508a06c69a2\" (UID: \"97443f21-9db6-47a2-b6af-d508a06c69a2\") " Nov 21 15:00:03 crc kubenswrapper[4774]: I1121 15:00:03.851650 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/97443f21-9db6-47a2-b6af-d508a06c69a2-config-volume\") pod \"97443f21-9db6-47a2-b6af-d508a06c69a2\" (UID: \"97443f21-9db6-47a2-b6af-d508a06c69a2\") " Nov 21 15:00:03 crc kubenswrapper[4774]: I1121 15:00:03.851768 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzmxp\" (UniqueName: \"kubernetes.io/projected/97443f21-9db6-47a2-b6af-d508a06c69a2-kube-api-access-xzmxp\") pod \"97443f21-9db6-47a2-b6af-d508a06c69a2\" (UID: \"97443f21-9db6-47a2-b6af-d508a06c69a2\") " Nov 21 15:00:03 crc kubenswrapper[4774]: I1121 15:00:03.853157 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97443f21-9db6-47a2-b6af-d508a06c69a2-config-volume" (OuterVolumeSpecName: "config-volume") pod "97443f21-9db6-47a2-b6af-d508a06c69a2" (UID: "97443f21-9db6-47a2-b6af-d508a06c69a2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:00:03 crc kubenswrapper[4774]: I1121 15:00:03.858139 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97443f21-9db6-47a2-b6af-d508a06c69a2-kube-api-access-xzmxp" (OuterVolumeSpecName: "kube-api-access-xzmxp") pod "97443f21-9db6-47a2-b6af-d508a06c69a2" (UID: "97443f21-9db6-47a2-b6af-d508a06c69a2"). InnerVolumeSpecName "kube-api-access-xzmxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:00:03 crc kubenswrapper[4774]: I1121 15:00:03.858328 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97443f21-9db6-47a2-b6af-d508a06c69a2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "97443f21-9db6-47a2-b6af-d508a06c69a2" (UID: "97443f21-9db6-47a2-b6af-d508a06c69a2"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:00:03 crc kubenswrapper[4774]: I1121 15:00:03.953500 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzmxp\" (UniqueName: \"kubernetes.io/projected/97443f21-9db6-47a2-b6af-d508a06c69a2-kube-api-access-xzmxp\") on node \"crc\" DevicePath \"\"" Nov 21 15:00:03 crc kubenswrapper[4774]: I1121 15:00:03.953540 4774 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/97443f21-9db6-47a2-b6af-d508a06c69a2-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 21 15:00:03 crc kubenswrapper[4774]: I1121 15:00:03.953553 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/97443f21-9db6-47a2-b6af-d508a06c69a2-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 15:00:04 crc kubenswrapper[4774]: I1121 15:00:04.468125 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" event={"ID":"97443f21-9db6-47a2-b6af-d508a06c69a2","Type":"ContainerDied","Data":"95fd358ebba3646828dc455ce43481d8a2e7e836f165e61fa2f4eb81a8aada24"} Nov 21 15:00:04 crc kubenswrapper[4774]: I1121 15:00:04.468171 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95fd358ebba3646828dc455ce43481d8a2e7e836f165e61fa2f4eb81a8aada24" Nov 21 15:00:04 crc kubenswrapper[4774]: I1121 15:00:04.468227 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s" Nov 21 15:00:04 crc kubenswrapper[4774]: I1121 15:00:04.821127 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9"] Nov 21 15:00:04 crc kubenswrapper[4774]: I1121 15:00:04.832082 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395575-hxvk9"] Nov 21 15:00:06 crc kubenswrapper[4774]: I1121 15:00:06.109910 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed90be29-4d9f-46c7-b158-074488aad60e" path="/var/lib/kubelet/pods/ed90be29-4d9f-46c7-b158-074488aad60e/volumes" Nov 21 15:00:39 crc kubenswrapper[4774]: I1121 15:00:39.103329 4774 scope.go:117] "RemoveContainer" containerID="2354d724114d4e9bac4893889c3264e28535e3216f96885003a0c5b26ad1d7df" Nov 21 15:01:29 crc kubenswrapper[4774]: I1121 15:01:29.600325 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:01:29 crc kubenswrapper[4774]: I1121 15:01:29.602002 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:01:59 crc kubenswrapper[4774]: I1121 15:01:59.600759 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Nov 21 15:01:59 crc kubenswrapper[4774]: I1121 15:01:59.601327 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:02:29 crc kubenswrapper[4774]: I1121 15:02:29.601329 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:02:29 crc kubenswrapper[4774]: I1121 15:02:29.602480 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:02:29 crc kubenswrapper[4774]: I1121 15:02:29.602555 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 15:02:29 crc kubenswrapper[4774]: I1121 15:02:29.603788 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e873bbc47c06fda572bd9ffed046734326ec7588fb044d34c1818890f2e11c1f"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 15:02:29 crc kubenswrapper[4774]: I1121 15:02:29.603894 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://e873bbc47c06fda572bd9ffed046734326ec7588fb044d34c1818890f2e11c1f" gracePeriod=600 Nov 21 15:02:30 crc kubenswrapper[4774]: I1121 15:02:30.680185 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="e873bbc47c06fda572bd9ffed046734326ec7588fb044d34c1818890f2e11c1f" exitCode=0 Nov 21 15:02:30 crc kubenswrapper[4774]: I1121 15:02:30.680225 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"e873bbc47c06fda572bd9ffed046734326ec7588fb044d34c1818890f2e11c1f"} Nov 21 15:02:30 crc kubenswrapper[4774]: I1121 15:02:30.680675 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b"} Nov 21 15:02:30 crc kubenswrapper[4774]: I1121 15:02:30.680707 4774 scope.go:117] "RemoveContainer" containerID="d639c75a0ba8f8da8bdef168adf881cd67b7215638ca54b1a374380ef77d2107" Nov 21 15:04:29 crc kubenswrapper[4774]: I1121 15:04:29.600930 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:04:29 crc kubenswrapper[4774]: I1121 15:04:29.601446 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:04:59 crc kubenswrapper[4774]: I1121 15:04:59.600783 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:04:59 crc kubenswrapper[4774]: I1121 15:04:59.601464 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:05:29 crc kubenswrapper[4774]: I1121 15:05:29.601207 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:05:29 crc kubenswrapper[4774]: I1121 15:05:29.601784 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:05:29 crc kubenswrapper[4774]: I1121 15:05:29.601882 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 15:05:29 crc kubenswrapper[4774]: I1121 15:05:29.602936 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 15:05:29 crc kubenswrapper[4774]: I1121 15:05:29.603050 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" gracePeriod=600 Nov 21 15:05:29 crc kubenswrapper[4774]: E1121 15:05:29.755306 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:05:30 crc 
kubenswrapper[4774]: I1121 15:05:30.256900 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" exitCode=0 Nov 21 15:05:30 crc kubenswrapper[4774]: I1121 15:05:30.256973 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b"} Nov 21 15:05:30 crc kubenswrapper[4774]: I1121 15:05:30.257310 4774 scope.go:117] "RemoveContainer" containerID="e873bbc47c06fda572bd9ffed046734326ec7588fb044d34c1818890f2e11c1f" Nov 21 15:05:30 crc kubenswrapper[4774]: I1121 15:05:30.257908 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:05:30 crc kubenswrapper[4774]: E1121 15:05:30.258293 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:05:45 crc kubenswrapper[4774]: I1121 15:05:45.092884 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:05:45 crc kubenswrapper[4774]: E1121 15:05:45.093615 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:05:58 crc kubenswrapper[4774]: I1121 15:05:58.093992 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:05:58 crc kubenswrapper[4774]: E1121 15:05:58.095419 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:06:09 crc kubenswrapper[4774]: I1121 15:06:09.093736 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:06:09 crc kubenswrapper[4774]: E1121 15:06:09.094629 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:06:20 crc kubenswrapper[4774]: I1121 15:06:20.098199 4774 scope.go:117] "RemoveContainer" 
containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:06:20 crc kubenswrapper[4774]: E1121 15:06:20.100406 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:06:32 crc kubenswrapper[4774]: I1121 15:06:32.092742 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:06:32 crc kubenswrapper[4774]: E1121 15:06:32.093492 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:06:47 crc kubenswrapper[4774]: I1121 15:06:47.093158 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:06:47 crc kubenswrapper[4774]: E1121 15:06:47.093926 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:07:01 crc kubenswrapper[4774]: I1121 15:07:01.093562 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:07:01 crc kubenswrapper[4774]: E1121 15:07:01.094420 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:07:16 crc kubenswrapper[4774]: I1121 15:07:16.093450 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:07:16 crc kubenswrapper[4774]: E1121 15:07:16.094140 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:07:27 crc kubenswrapper[4774]: I1121 15:07:27.093255 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:07:27 crc kubenswrapper[4774]: E1121 15:07:27.095714 4774 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:07:42 crc kubenswrapper[4774]: I1121 15:07:42.093306 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:07:42 crc kubenswrapper[4774]: E1121 15:07:42.094089 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:07:53 crc kubenswrapper[4774]: I1121 15:07:53.093787 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:07:53 crc kubenswrapper[4774]: E1121 15:07:53.095021 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.197558 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xbdkg"] Nov 21 15:07:59 crc kubenswrapper[4774]: E1121 15:07:59.198570 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97443f21-9db6-47a2-b6af-d508a06c69a2" containerName="collect-profiles" Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.198592 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="97443f21-9db6-47a2-b6af-d508a06c69a2" containerName="collect-profiles" Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.198799 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="97443f21-9db6-47a2-b6af-d508a06c69a2" containerName="collect-profiles" Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.200169 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.216166 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xbdkg"]
Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.319306 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-utilities\") pod \"certified-operators-xbdkg\" (UID: \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\") " pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.319410 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjjks\" (UniqueName: \"kubernetes.io/projected/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-kube-api-access-tjjks\") pod \"certified-operators-xbdkg\" (UID: \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\") " pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.319457 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-catalog-content\") pod \"certified-operators-xbdkg\" (UID: \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\") " pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.420760 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjjks\" (UniqueName: \"kubernetes.io/projected/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-kube-api-access-tjjks\") pod \"certified-operators-xbdkg\" (UID: \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\") " pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.421115 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-catalog-content\") pod \"certified-operators-xbdkg\" (UID: \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\") " pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.421229 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-utilities\") pod \"certified-operators-xbdkg\" (UID: \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\") " pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.421695 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-catalog-content\") pod \"certified-operators-xbdkg\" (UID: \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\") " pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.422033 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-utilities\") pod \"certified-operators-xbdkg\" (UID: \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\") " pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.445208 4774 operation_generator.go:637]
"MountVolume.SetUp succeeded for volume \"kube-api-access-tjjks\" (UniqueName: \"kubernetes.io/projected/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-kube-api-access-tjjks\") pod \"certified-operators-xbdkg\" (UID: \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\") " pod="openshift-marketplace/certified-operators-xbdkg" Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.518642 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xbdkg" Nov 21 15:07:59 crc kubenswrapper[4774]: I1121 15:07:59.972778 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xbdkg"] Nov 21 15:08:00 crc kubenswrapper[4774]: I1121 15:08:00.438194 4774 generic.go:334] "Generic (PLEG): container finished" podID="2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" containerID="5405d690f79313a2e66751b5be94e080ed28d90ce82cc0bc029758840aa34257" exitCode=0 Nov 21 15:08:00 crc kubenswrapper[4774]: I1121 15:08:00.438482 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xbdkg" event={"ID":"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a","Type":"ContainerDied","Data":"5405d690f79313a2e66751b5be94e080ed28d90ce82cc0bc029758840aa34257"} Nov 21 15:08:00 crc kubenswrapper[4774]: I1121 15:08:00.438530 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xbdkg" event={"ID":"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a","Type":"ContainerStarted","Data":"d32533f4286e3fb685fc6d9877a3c5d2babcb4d455496808d6d10692a6926266"} Nov 21 15:08:00 crc kubenswrapper[4774]: I1121 15:08:00.440460 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 15:08:04 crc kubenswrapper[4774]: I1121 15:08:04.093259 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:08:04 crc kubenswrapper[4774]: E1121 15:08:04.094020 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:08:05 crc kubenswrapper[4774]: I1121 15:08:05.477483 4774 generic.go:334] "Generic (PLEG): container finished" podID="2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" containerID="3cc0b2c9eeba363610a0dedcaa9d564eb967e8c93942ce3da17b604577b30306" exitCode=0 Nov 21 15:08:05 crc kubenswrapper[4774]: I1121 15:08:05.477565 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xbdkg" event={"ID":"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a","Type":"ContainerDied","Data":"3cc0b2c9eeba363610a0dedcaa9d564eb967e8c93942ce3da17b604577b30306"} Nov 21 15:08:07 crc kubenswrapper[4774]: I1121 15:08:07.503460 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xbdkg" event={"ID":"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a","Type":"ContainerStarted","Data":"0bba85d33d553014ad0868a9cf1456bb8f36b03c749fd40fa90ac47da1fab984"} Nov 21 15:08:09 crc kubenswrapper[4774]: I1121 15:08:09.525705 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xbdkg" Nov 21 15:08:09 crc kubenswrapper[4774]: I1121 
15:08:09.526136 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:08:09 crc kubenswrapper[4774]: I1121 15:08:09.657057 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:08:09 crc kubenswrapper[4774]: I1121 15:08:09.680603 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xbdkg" podStartSLOduration=4.666295549 podStartE2EDuration="10.680581543s" podCreationTimestamp="2025-11-21 15:07:59 +0000 UTC" firstStartedPulling="2025-11-21 15:08:00.440230835 +0000 UTC m=+3871.092430094" lastFinishedPulling="2025-11-21 15:08:06.454516829 +0000 UTC m=+3877.106716088" observedRunningTime="2025-11-21 15:08:07.520990403 +0000 UTC m=+3878.173189682" watchObservedRunningTime="2025-11-21 15:08:09.680581543 +0000 UTC m=+3880.332780812"
Nov 21 15:08:19 crc kubenswrapper[4774]: I1121 15:08:19.093621 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b"
Nov 21 15:08:19 crc kubenswrapper[4774]: E1121 15:08:19.094401 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:08:19 crc kubenswrapper[4774]: I1121 15:08:19.562980 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:08:19 crc kubenswrapper[4774]: I1121 15:08:19.602339 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xbdkg"]
Nov 21 15:08:19 crc kubenswrapper[4774]: I1121 15:08:19.607323 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xbdkg" podUID="2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" containerName="registry-server" containerID="cri-o://0bba85d33d553014ad0868a9cf1456bb8f36b03c749fd40fa90ac47da1fab984" gracePeriod=2
Nov 21 15:08:20 crc kubenswrapper[4774]: I1121 15:08:20.615300 4774 generic.go:334] "Generic (PLEG): container finished" podID="2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" containerID="0bba85d33d553014ad0868a9cf1456bb8f36b03c749fd40fa90ac47da1fab984" exitCode=0
Nov 21 15:08:20 crc kubenswrapper[4774]: I1121 15:08:20.615597 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xbdkg" event={"ID":"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a","Type":"ContainerDied","Data":"0bba85d33d553014ad0868a9cf1456bb8f36b03c749fd40fa90ac47da1fab984"}
Nov 21 15:08:20 crc kubenswrapper[4774]: I1121 15:08:20.659907 4774 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/certified-operators-xbdkg"
Nov 21 15:08:20 crc kubenswrapper[4774]: I1121 15:08:20.753644 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-utilities\") pod \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\" (UID: \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\") "
Nov 21 15:08:20 crc kubenswrapper[4774]: I1121 15:08:20.753757 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-catalog-content\") pod \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\" (UID: \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\") "
Nov 21 15:08:20 crc kubenswrapper[4774]: I1121 15:08:20.753843 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjjks\" (UniqueName: \"kubernetes.io/projected/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-kube-api-access-tjjks\") pod \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\" (UID: \"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a\") "
Nov 21 15:08:20 crc kubenswrapper[4774]: I1121 15:08:20.754696 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-utilities" (OuterVolumeSpecName: "utilities") pod "2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" (UID: "2846d5ee-4c2f-40c7-85fc-8372f8e20e8a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:08:20 crc kubenswrapper[4774]: I1121 15:08:20.760398 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-kube-api-access-tjjks" (OuterVolumeSpecName: "kube-api-access-tjjks") pod "2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" (UID: "2846d5ee-4c2f-40c7-85fc-8372f8e20e8a"). InnerVolumeSpecName "kube-api-access-tjjks". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:08:20 crc kubenswrapper[4774]: I1121 15:08:20.807725 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" (UID: "2846d5ee-4c2f-40c7-85fc-8372f8e20e8a"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:08:20 crc kubenswrapper[4774]: I1121 15:08:20.855835 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:08:20 crc kubenswrapper[4774]: I1121 15:08:20.855870 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:08:20 crc kubenswrapper[4774]: I1121 15:08:20.855887 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjjks\" (UniqueName: \"kubernetes.io/projected/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a-kube-api-access-tjjks\") on node \"crc\" DevicePath \"\"" Nov 21 15:08:21 crc kubenswrapper[4774]: I1121 15:08:21.625885 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xbdkg" event={"ID":"2846d5ee-4c2f-40c7-85fc-8372f8e20e8a","Type":"ContainerDied","Data":"d32533f4286e3fb685fc6d9877a3c5d2babcb4d455496808d6d10692a6926266"} Nov 21 15:08:21 crc kubenswrapper[4774]: I1121 15:08:21.625981 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xbdkg" Nov 21 15:08:21 crc kubenswrapper[4774]: I1121 15:08:21.626271 4774 scope.go:117] "RemoveContainer" containerID="0bba85d33d553014ad0868a9cf1456bb8f36b03c749fd40fa90ac47da1fab984" Nov 21 15:08:21 crc kubenswrapper[4774]: I1121 15:08:21.653801 4774 scope.go:117] "RemoveContainer" containerID="3cc0b2c9eeba363610a0dedcaa9d564eb967e8c93942ce3da17b604577b30306" Nov 21 15:08:21 crc kubenswrapper[4774]: I1121 15:08:21.674845 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xbdkg"] Nov 21 15:08:21 crc kubenswrapper[4774]: I1121 15:08:21.684322 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xbdkg"] Nov 21 15:08:21 crc kubenswrapper[4774]: I1121 15:08:21.706272 4774 scope.go:117] "RemoveContainer" containerID="5405d690f79313a2e66751b5be94e080ed28d90ce82cc0bc029758840aa34257" Nov 21 15:08:22 crc kubenswrapper[4774]: I1121 15:08:22.107798 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" path="/var/lib/kubelet/pods/2846d5ee-4c2f-40c7-85fc-8372f8e20e8a/volumes" Nov 21 15:08:30 crc kubenswrapper[4774]: I1121 15:08:30.097736 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:08:30 crc kubenswrapper[4774]: E1121 15:08:30.098879 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:08:42 crc kubenswrapper[4774]: I1121 15:08:42.093897 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:08:42 crc kubenswrapper[4774]: E1121 15:08:42.095135 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:08:42 crc kubenswrapper[4774]: I1121 15:08:42.752597 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qvdh9"]
Nov 21 15:08:42 crc kubenswrapper[4774]: E1121 15:08:42.752982 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" containerName="extract-utilities"
Nov 21 15:08:42 crc kubenswrapper[4774]: I1121 15:08:42.752998 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" containerName="extract-utilities"
Nov 21 15:08:42 crc kubenswrapper[4774]: E1121 15:08:42.753025 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" containerName="registry-server"
Nov 21 15:08:42 crc kubenswrapper[4774]: I1121 15:08:42.753034 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" containerName="registry-server"
Nov 21 15:08:42 crc kubenswrapper[4774]: E1121 15:08:42.753045 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" containerName="extract-content"
Nov 21 15:08:42 crc kubenswrapper[4774]: I1121 15:08:42.753054 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" containerName="extract-content"
Nov 21 15:08:42 crc kubenswrapper[4774]: I1121 15:08:42.753257 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="2846d5ee-4c2f-40c7-85fc-8372f8e20e8a" containerName="registry-server"
Nov 21 15:08:42 crc kubenswrapper[4774]: I1121 15:08:42.755493 4774 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:42 crc kubenswrapper[4774]: I1121 15:08:42.762404 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qvdh9"]
Nov 21 15:08:42 crc kubenswrapper[4774]: I1121 15:08:42.915771 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b94295a-620a-4351-a42b-19cc6cd6c884-utilities\") pod \"redhat-marketplace-qvdh9\" (UID: \"0b94295a-620a-4351-a42b-19cc6cd6c884\") " pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:42 crc kubenswrapper[4774]: I1121 15:08:42.915837 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qng28\" (UniqueName: \"kubernetes.io/projected/0b94295a-620a-4351-a42b-19cc6cd6c884-kube-api-access-qng28\") pod \"redhat-marketplace-qvdh9\" (UID: \"0b94295a-620a-4351-a42b-19cc6cd6c884\") " pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:42 crc kubenswrapper[4774]: I1121 15:08:42.916110 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b94295a-620a-4351-a42b-19cc6cd6c884-catalog-content\") pod \"redhat-marketplace-qvdh9\" (UID: \"0b94295a-620a-4351-a42b-19cc6cd6c884\") " pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:43 crc kubenswrapper[4774]: I1121 15:08:43.017278 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b94295a-620a-4351-a42b-19cc6cd6c884-utilities\") pod \"redhat-marketplace-qvdh9\" (UID: \"0b94295a-620a-4351-a42b-19cc6cd6c884\") " pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:43 crc kubenswrapper[4774]: I1121 15:08:43.017325 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qng28\" (UniqueName: \"kubernetes.io/projected/0b94295a-620a-4351-a42b-19cc6cd6c884-kube-api-access-qng28\") pod \"redhat-marketplace-qvdh9\" (UID: \"0b94295a-620a-4351-a42b-19cc6cd6c884\") " pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:43 crc kubenswrapper[4774]: I1121 15:08:43.017398 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b94295a-620a-4351-a42b-19cc6cd6c884-catalog-content\") pod \"redhat-marketplace-qvdh9\" (UID: \"0b94295a-620a-4351-a42b-19cc6cd6c884\") " pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:43 crc kubenswrapper[4774]: I1121 15:08:43.017731 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b94295a-620a-4351-a42b-19cc6cd6c884-utilities\") pod \"redhat-marketplace-qvdh9\" (UID: \"0b94295a-620a-4351-a42b-19cc6cd6c884\") " pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:43 crc kubenswrapper[4774]: I1121 15:08:43.017769 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b94295a-620a-4351-a42b-19cc6cd6c884-catalog-content\") pod \"redhat-marketplace-qvdh9\" (UID: \"0b94295a-620a-4351-a42b-19cc6cd6c884\") " pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:43 crc kubenswrapper[4774]: I1121 15:08:43.036573 4774 operation_generator.go:637] "MountVolume.SetUp
succeeded for volume \"kube-api-access-qng28\" (UniqueName: \"kubernetes.io/projected/0b94295a-620a-4351-a42b-19cc6cd6c884-kube-api-access-qng28\") pod \"redhat-marketplace-qvdh9\" (UID: \"0b94295a-620a-4351-a42b-19cc6cd6c884\") " pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:43 crc kubenswrapper[4774]: I1121 15:08:43.088151 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:43 crc kubenswrapper[4774]: I1121 15:08:43.322365 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qvdh9"]
Nov 21 15:08:43 crc kubenswrapper[4774]: I1121 15:08:43.797053 4774 generic.go:334] "Generic (PLEG): container finished" podID="0b94295a-620a-4351-a42b-19cc6cd6c884" containerID="5d6b5fbda808cee4e15001fe36a4f9c05a55c01527af902688c63563d696b930" exitCode=0
Nov 21 15:08:43 crc kubenswrapper[4774]: I1121 15:08:43.797146 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qvdh9" event={"ID":"0b94295a-620a-4351-a42b-19cc6cd6c884","Type":"ContainerDied","Data":"5d6b5fbda808cee4e15001fe36a4f9c05a55c01527af902688c63563d696b930"}
Nov 21 15:08:43 crc kubenswrapper[4774]: I1121 15:08:43.797370 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qvdh9" event={"ID":"0b94295a-620a-4351-a42b-19cc6cd6c884","Type":"ContainerStarted","Data":"00898245695ec8faa41f89efb5dad7b5ea538d6b8b4a651ffd833f0bf7479821"}
Nov 21 15:08:47 crc kubenswrapper[4774]: I1121 15:08:46.821247 4774 generic.go:334] "Generic (PLEG): container finished" podID="0b94295a-620a-4351-a42b-19cc6cd6c884" containerID="d140286bf5a050bd6091b7004c43bd71319b55b33826c29f5e905535c2e4dc8b" exitCode=0
Nov 21 15:08:47 crc kubenswrapper[4774]: I1121 15:08:46.821418 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qvdh9" event={"ID":"0b94295a-620a-4351-a42b-19cc6cd6c884","Type":"ContainerDied","Data":"d140286bf5a050bd6091b7004c43bd71319b55b33826c29f5e905535c2e4dc8b"}
Nov 21 15:08:49 crc kubenswrapper[4774]: I1121 15:08:49.845076 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qvdh9" event={"ID":"0b94295a-620a-4351-a42b-19cc6cd6c884","Type":"ContainerStarted","Data":"05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f"}
Nov 21 15:08:49 crc kubenswrapper[4774]: I1121 15:08:49.863398 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qvdh9" podStartSLOduration=3.15107706 podStartE2EDuration="7.863382641s" podCreationTimestamp="2025-11-21 15:08:42 +0000 UTC" firstStartedPulling="2025-11-21 15:08:43.799109929 +0000 UTC m=+3914.451309188" lastFinishedPulling="2025-11-21 15:08:48.51141552 +0000 UTC m=+3919.163614769" observedRunningTime="2025-11-21 15:08:49.86089625 +0000 UTC m=+3920.513095509" watchObservedRunningTime="2025-11-21 15:08:49.863382641 +0000 UTC m=+3920.515581900"
Nov 21 15:08:53 crc kubenswrapper[4774]: I1121 15:08:53.088418 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:53 crc kubenswrapper[4774]: I1121 15:08:53.088783 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:53 crc kubenswrapper[4774]: I1121 15:08:53.130803 4774 kubelet.go:2542] "SyncLoop
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qvdh9" Nov 21 15:08:53 crc kubenswrapper[4774]: I1121 15:08:53.943584 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qvdh9" Nov 21 15:08:53 crc kubenswrapper[4774]: I1121 15:08:53.990016 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qvdh9"] Nov 21 15:08:55 crc kubenswrapper[4774]: I1121 15:08:55.889314 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qvdh9" podUID="0b94295a-620a-4351-a42b-19cc6cd6c884" containerName="registry-server" containerID="cri-o://05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f" gracePeriod=2 Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.092761 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:08:56 crc kubenswrapper[4774]: E1121 15:08:56.093147 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.799441 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qvdh9" Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.905128 4774 generic.go:334] "Generic (PLEG): container finished" podID="0b94295a-620a-4351-a42b-19cc6cd6c884" containerID="05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f" exitCode=0 Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.905195 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qvdh9" event={"ID":"0b94295a-620a-4351-a42b-19cc6cd6c884","Type":"ContainerDied","Data":"05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f"} Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.905221 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qvdh9"
Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.905254 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qvdh9" event={"ID":"0b94295a-620a-4351-a42b-19cc6cd6c884","Type":"ContainerDied","Data":"00898245695ec8faa41f89efb5dad7b5ea538d6b8b4a651ffd833f0bf7479821"}
Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.905275 4774 scope.go:117] "RemoveContainer" containerID="05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f"
Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.916542 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b94295a-620a-4351-a42b-19cc6cd6c884-catalog-content\") pod \"0b94295a-620a-4351-a42b-19cc6cd6c884\" (UID: \"0b94295a-620a-4351-a42b-19cc6cd6c884\") "
Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.916636 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b94295a-620a-4351-a42b-19cc6cd6c884-utilities\") pod \"0b94295a-620a-4351-a42b-19cc6cd6c884\" (UID: \"0b94295a-620a-4351-a42b-19cc6cd6c884\") "
Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.916688 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qng28\" (UniqueName: \"kubernetes.io/projected/0b94295a-620a-4351-a42b-19cc6cd6c884-kube-api-access-qng28\") pod \"0b94295a-620a-4351-a42b-19cc6cd6c884\" (UID: \"0b94295a-620a-4351-a42b-19cc6cd6c884\") "
Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.917631 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b94295a-620a-4351-a42b-19cc6cd6c884-utilities" (OuterVolumeSpecName: "utilities") pod "0b94295a-620a-4351-a42b-19cc6cd6c884" (UID: "0b94295a-620a-4351-a42b-19cc6cd6c884"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.921224 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b94295a-620a-4351-a42b-19cc6cd6c884-kube-api-access-qng28" (OuterVolumeSpecName: "kube-api-access-qng28") pod "0b94295a-620a-4351-a42b-19cc6cd6c884" (UID: "0b94295a-620a-4351-a42b-19cc6cd6c884"). InnerVolumeSpecName "kube-api-access-qng28". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.931535 4774 scope.go:117] "RemoveContainer" containerID="d140286bf5a050bd6091b7004c43bd71319b55b33826c29f5e905535c2e4dc8b"
Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.939959 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b94295a-620a-4351-a42b-19cc6cd6c884-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0b94295a-620a-4351-a42b-19cc6cd6c884" (UID: "0b94295a-620a-4351-a42b-19cc6cd6c884"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.964727 4774 scope.go:117] "RemoveContainer" containerID="5d6b5fbda808cee4e15001fe36a4f9c05a55c01527af902688c63563d696b930" Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.985311 4774 scope.go:117] "RemoveContainer" containerID="05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f" Nov 21 15:08:56 crc kubenswrapper[4774]: E1121 15:08:56.985747 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f\": container with ID starting with 05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f not found: ID does not exist" containerID="05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f" Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.985795 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f"} err="failed to get container status \"05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f\": rpc error: code = NotFound desc = could not find container \"05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f\": container with ID starting with 05d4194051866f0b6cf9edb3e087de2cca4aea23f6e37bfa81cd5b161bf0c11f not found: ID does not exist" Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.985877 4774 scope.go:117] "RemoveContainer" containerID="d140286bf5a050bd6091b7004c43bd71319b55b33826c29f5e905535c2e4dc8b" Nov 21 15:08:56 crc kubenswrapper[4774]: E1121 15:08:56.986334 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d140286bf5a050bd6091b7004c43bd71319b55b33826c29f5e905535c2e4dc8b\": container with ID starting with d140286bf5a050bd6091b7004c43bd71319b55b33826c29f5e905535c2e4dc8b not found: ID does not exist" containerID="d140286bf5a050bd6091b7004c43bd71319b55b33826c29f5e905535c2e4dc8b" Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.986380 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d140286bf5a050bd6091b7004c43bd71319b55b33826c29f5e905535c2e4dc8b"} err="failed to get container status \"d140286bf5a050bd6091b7004c43bd71319b55b33826c29f5e905535c2e4dc8b\": rpc error: code = NotFound desc = could not find container \"d140286bf5a050bd6091b7004c43bd71319b55b33826c29f5e905535c2e4dc8b\": container with ID starting with d140286bf5a050bd6091b7004c43bd71319b55b33826c29f5e905535c2e4dc8b not found: ID does not exist" Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.986412 4774 scope.go:117] "RemoveContainer" containerID="5d6b5fbda808cee4e15001fe36a4f9c05a55c01527af902688c63563d696b930" Nov 21 15:08:56 crc kubenswrapper[4774]: E1121 15:08:56.986699 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d6b5fbda808cee4e15001fe36a4f9c05a55c01527af902688c63563d696b930\": container with ID starting with 5d6b5fbda808cee4e15001fe36a4f9c05a55c01527af902688c63563d696b930 not found: ID does not exist" containerID="5d6b5fbda808cee4e15001fe36a4f9c05a55c01527af902688c63563d696b930" Nov 21 15:08:56 crc kubenswrapper[4774]: I1121 15:08:56.986751 4774 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"5d6b5fbda808cee4e15001fe36a4f9c05a55c01527af902688c63563d696b930"} err="failed to get container status \"5d6b5fbda808cee4e15001fe36a4f9c05a55c01527af902688c63563d696b930\": rpc error: code = NotFound desc = could not find container \"5d6b5fbda808cee4e15001fe36a4f9c05a55c01527af902688c63563d696b930\": container with ID starting with 5d6b5fbda808cee4e15001fe36a4f9c05a55c01527af902688c63563d696b930 not found: ID does not exist" Nov 21 15:08:57 crc kubenswrapper[4774]: I1121 15:08:57.017852 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b94295a-620a-4351-a42b-19cc6cd6c884-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:08:57 crc kubenswrapper[4774]: I1121 15:08:57.017881 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b94295a-620a-4351-a42b-19cc6cd6c884-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:08:57 crc kubenswrapper[4774]: I1121 15:08:57.017893 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qng28\" (UniqueName: \"kubernetes.io/projected/0b94295a-620a-4351-a42b-19cc6cd6c884-kube-api-access-qng28\") on node \"crc\" DevicePath \"\"" Nov 21 15:08:57 crc kubenswrapper[4774]: I1121 15:08:57.256354 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qvdh9"] Nov 21 15:08:57 crc kubenswrapper[4774]: I1121 15:08:57.264467 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qvdh9"] Nov 21 15:08:58 crc kubenswrapper[4774]: I1121 15:08:58.102115 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b94295a-620a-4351-a42b-19cc6cd6c884" path="/var/lib/kubelet/pods/0b94295a-620a-4351-a42b-19cc6cd6c884/volumes" Nov 21 15:09:09 crc kubenswrapper[4774]: I1121 15:09:09.092832 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:09:09 crc kubenswrapper[4774]: E1121 15:09:09.093595 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:09:22 crc kubenswrapper[4774]: I1121 15:09:22.093183 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:09:22 crc kubenswrapper[4774]: E1121 15:09:22.094156 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:09:35 crc kubenswrapper[4774]: I1121 15:09:35.092837 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:09:35 crc kubenswrapper[4774]: E1121 15:09:35.093480 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:09:46 crc kubenswrapper[4774]: I1121 15:09:46.094250 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:09:46 crc kubenswrapper[4774]: E1121 15:09:46.095325 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:09:58 crc kubenswrapper[4774]: I1121 15:09:58.093441 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:09:58 crc kubenswrapper[4774]: E1121 15:09:58.094366 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:10:12 crc kubenswrapper[4774]: I1121 15:10:12.094157 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:10:12 crc kubenswrapper[4774]: E1121 15:10:12.094887 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.385918 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jrjtb"] Nov 21 15:10:20 crc kubenswrapper[4774]: E1121 15:10:20.387005 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b94295a-620a-4351-a42b-19cc6cd6c884" containerName="extract-content" Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.387028 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b94295a-620a-4351-a42b-19cc6cd6c884" containerName="extract-content" Nov 21 15:10:20 crc kubenswrapper[4774]: E1121 15:10:20.387047 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b94295a-620a-4351-a42b-19cc6cd6c884" containerName="registry-server" Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.387060 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b94295a-620a-4351-a42b-19cc6cd6c884" containerName="registry-server" Nov 21 15:10:20 crc kubenswrapper[4774]: E1121 15:10:20.387087 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b94295a-620a-4351-a42b-19cc6cd6c884" containerName="extract-utilities" Nov 21 15:10:20 crc 
kubenswrapper[4774]: I1121 15:10:20.387098 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b94295a-620a-4351-a42b-19cc6cd6c884" containerName="extract-utilities"
Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.387345 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b94295a-620a-4351-a42b-19cc6cd6c884" containerName="registry-server"
Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.389130 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jrjtb"
Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.409478 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jrjtb"]
Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.479275 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqwv7\" (UniqueName: \"kubernetes.io/projected/a9ea5a73-2c6f-4004-b453-93af566cf163-kube-api-access-jqwv7\") pod \"redhat-operators-jrjtb\" (UID: \"a9ea5a73-2c6f-4004-b453-93af566cf163\") " pod="openshift-marketplace/redhat-operators-jrjtb"
Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.479420 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ea5a73-2c6f-4004-b453-93af566cf163-utilities\") pod \"redhat-operators-jrjtb\" (UID: \"a9ea5a73-2c6f-4004-b453-93af566cf163\") " pod="openshift-marketplace/redhat-operators-jrjtb"
Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.479487 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ea5a73-2c6f-4004-b453-93af566cf163-catalog-content\") pod \"redhat-operators-jrjtb\" (UID: \"a9ea5a73-2c6f-4004-b453-93af566cf163\") " pod="openshift-marketplace/redhat-operators-jrjtb"
Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.580579 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ea5a73-2c6f-4004-b453-93af566cf163-catalog-content\") pod \"redhat-operators-jrjtb\" (UID: \"a9ea5a73-2c6f-4004-b453-93af566cf163\") " pod="openshift-marketplace/redhat-operators-jrjtb"
Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.580734 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqwv7\" (UniqueName: \"kubernetes.io/projected/a9ea5a73-2c6f-4004-b453-93af566cf163-kube-api-access-jqwv7\") pod \"redhat-operators-jrjtb\" (UID: \"a9ea5a73-2c6f-4004-b453-93af566cf163\") " pod="openshift-marketplace/redhat-operators-jrjtb"
Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.580776 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ea5a73-2c6f-4004-b453-93af566cf163-utilities\") pod \"redhat-operators-jrjtb\" (UID: \"a9ea5a73-2c6f-4004-b453-93af566cf163\") " pod="openshift-marketplace/redhat-operators-jrjtb"
Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.581173 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ea5a73-2c6f-4004-b453-93af566cf163-catalog-content\") pod \"redhat-operators-jrjtb\" (UID: \"a9ea5a73-2c6f-4004-b453-93af566cf163\") " pod="openshift-marketplace/redhat-operators-jrjtb"
Nov 21 15:10:20 crc
kubenswrapper[4774]: I1121 15:10:20.581278 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ea5a73-2c6f-4004-b453-93af566cf163-utilities\") pod \"redhat-operators-jrjtb\" (UID: \"a9ea5a73-2c6f-4004-b453-93af566cf163\") " pod="openshift-marketplace/redhat-operators-jrjtb"
Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.601565 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqwv7\" (UniqueName: \"kubernetes.io/projected/a9ea5a73-2c6f-4004-b453-93af566cf163-kube-api-access-jqwv7\") pod \"redhat-operators-jrjtb\" (UID: \"a9ea5a73-2c6f-4004-b453-93af566cf163\") " pod="openshift-marketplace/redhat-operators-jrjtb"
Nov 21 15:10:20 crc kubenswrapper[4774]: I1121 15:10:20.738790 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jrjtb"
Nov 21 15:10:21 crc kubenswrapper[4774]: I1121 15:10:21.187108 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jrjtb"]
Nov 21 15:10:21 crc kubenswrapper[4774]: I1121 15:10:21.576578 4774 generic.go:334] "Generic (PLEG): container finished" podID="a9ea5a73-2c6f-4004-b453-93af566cf163" containerID="ef65b4ce6927b44147c26feca47f163e281908a85039d9a868438773af6295f1" exitCode=0
Nov 21 15:10:21 crc kubenswrapper[4774]: I1121 15:10:21.576785 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jrjtb" event={"ID":"a9ea5a73-2c6f-4004-b453-93af566cf163","Type":"ContainerDied","Data":"ef65b4ce6927b44147c26feca47f163e281908a85039d9a868438773af6295f1"}
Nov 21 15:10:21 crc kubenswrapper[4774]: I1121 15:10:21.576968 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jrjtb" event={"ID":"a9ea5a73-2c6f-4004-b453-93af566cf163","Type":"ContainerStarted","Data":"6b9e83c5258c6a5bc415878f637ca7c0ec58435b2df74d657e5ad9ec4a1cce94"}
Nov 21 15:10:22 crc kubenswrapper[4774]: I1121 15:10:22.587937 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jrjtb" event={"ID":"a9ea5a73-2c6f-4004-b453-93af566cf163","Type":"ContainerStarted","Data":"1807e5214eef578f00c099a67d0ef3756528f2fdb445fcb62fa247ce8dcd120e"}
Nov 21 15:10:23 crc kubenswrapper[4774]: I1121 15:10:23.597627 4774 generic.go:334] "Generic (PLEG): container finished" podID="a9ea5a73-2c6f-4004-b453-93af566cf163" containerID="1807e5214eef578f00c099a67d0ef3756528f2fdb445fcb62fa247ce8dcd120e" exitCode=0
Nov 21 15:10:23 crc kubenswrapper[4774]: I1121 15:10:23.597706 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jrjtb" event={"ID":"a9ea5a73-2c6f-4004-b453-93af566cf163","Type":"ContainerDied","Data":"1807e5214eef578f00c099a67d0ef3756528f2fdb445fcb62fa247ce8dcd120e"}
Nov 21 15:10:24 crc kubenswrapper[4774]: I1121 15:10:24.609375 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jrjtb" event={"ID":"a9ea5a73-2c6f-4004-b453-93af566cf163","Type":"ContainerStarted","Data":"2d0792b60d05ca66fdcdfec0ae24edc8ff2f0995439150bb5e3d74ba29272419"}
Nov 21 15:10:24 crc kubenswrapper[4774]: I1121 15:10:24.641846 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jrjtb" podStartSLOduration=2.154354224 podStartE2EDuration="4.641791431s" podCreationTimestamp="2025-11-21 15:10:20 +0000 UTC"
firstStartedPulling="2025-11-21 15:10:21.578779568 +0000 UTC m=+4012.230978827" lastFinishedPulling="2025-11-21 15:10:24.066216735 +0000 UTC m=+4014.718416034" observedRunningTime="2025-11-21 15:10:24.626557996 +0000 UTC m=+4015.278757275" watchObservedRunningTime="2025-11-21 15:10:24.641791431 +0000 UTC m=+4015.293990730" Nov 21 15:10:25 crc kubenswrapper[4774]: I1121 15:10:25.093696 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:10:25 crc kubenswrapper[4774]: E1121 15:10:25.094044 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:10:30 crc kubenswrapper[4774]: I1121 15:10:30.739749 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jrjtb" Nov 21 15:10:30 crc kubenswrapper[4774]: I1121 15:10:30.740404 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jrjtb" Nov 21 15:10:30 crc kubenswrapper[4774]: I1121 15:10:30.807873 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jrjtb" Nov 21 15:10:31 crc kubenswrapper[4774]: I1121 15:10:31.702511 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jrjtb" Nov 21 15:10:31 crc kubenswrapper[4774]: I1121 15:10:31.750734 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jrjtb"] Nov 21 15:10:33 crc kubenswrapper[4774]: I1121 15:10:33.682389 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jrjtb" podUID="a9ea5a73-2c6f-4004-b453-93af566cf163" containerName="registry-server" containerID="cri-o://2d0792b60d05ca66fdcdfec0ae24edc8ff2f0995439150bb5e3d74ba29272419" gracePeriod=2 Nov 21 15:10:35 crc kubenswrapper[4774]: I1121 15:10:35.700583 4774 generic.go:334] "Generic (PLEG): container finished" podID="a9ea5a73-2c6f-4004-b453-93af566cf163" containerID="2d0792b60d05ca66fdcdfec0ae24edc8ff2f0995439150bb5e3d74ba29272419" exitCode=0 Nov 21 15:10:35 crc kubenswrapper[4774]: I1121 15:10:35.700657 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jrjtb" event={"ID":"a9ea5a73-2c6f-4004-b453-93af566cf163","Type":"ContainerDied","Data":"2d0792b60d05ca66fdcdfec0ae24edc8ff2f0995439150bb5e3d74ba29272419"} Nov 21 15:10:35 crc kubenswrapper[4774]: I1121 15:10:35.884188 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jrjtb"
Nov 21 15:10:35 crc kubenswrapper[4774]: I1121 15:10:35.921801 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ea5a73-2c6f-4004-b453-93af566cf163-utilities\") pod \"a9ea5a73-2c6f-4004-b453-93af566cf163\" (UID: \"a9ea5a73-2c6f-4004-b453-93af566cf163\") "
Nov 21 15:10:35 crc kubenswrapper[4774]: I1121 15:10:35.921901 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqwv7\" (UniqueName: \"kubernetes.io/projected/a9ea5a73-2c6f-4004-b453-93af566cf163-kube-api-access-jqwv7\") pod \"a9ea5a73-2c6f-4004-b453-93af566cf163\" (UID: \"a9ea5a73-2c6f-4004-b453-93af566cf163\") "
Nov 21 15:10:35 crc kubenswrapper[4774]: I1121 15:10:35.922144 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ea5a73-2c6f-4004-b453-93af566cf163-catalog-content\") pod \"a9ea5a73-2c6f-4004-b453-93af566cf163\" (UID: \"a9ea5a73-2c6f-4004-b453-93af566cf163\") "
Nov 21 15:10:35 crc kubenswrapper[4774]: I1121 15:10:35.922710 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9ea5a73-2c6f-4004-b453-93af566cf163-utilities" (OuterVolumeSpecName: "utilities") pod "a9ea5a73-2c6f-4004-b453-93af566cf163" (UID: "a9ea5a73-2c6f-4004-b453-93af566cf163"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:10:35 crc kubenswrapper[4774]: I1121 15:10:35.929804 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9ea5a73-2c6f-4004-b453-93af566cf163-kube-api-access-jqwv7" (OuterVolumeSpecName: "kube-api-access-jqwv7") pod "a9ea5a73-2c6f-4004-b453-93af566cf163" (UID: "a9ea5a73-2c6f-4004-b453-93af566cf163"). InnerVolumeSpecName "kube-api-access-jqwv7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:10:36 crc kubenswrapper[4774]: I1121 15:10:36.022362 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9ea5a73-2c6f-4004-b453-93af566cf163-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a9ea5a73-2c6f-4004-b453-93af566cf163" (UID: "a9ea5a73-2c6f-4004-b453-93af566cf163"). InnerVolumeSpecName "catalog-content".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:10:36 crc kubenswrapper[4774]: I1121 15:10:36.024344 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ea5a73-2c6f-4004-b453-93af566cf163-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:10:36 crc kubenswrapper[4774]: I1121 15:10:36.024375 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqwv7\" (UniqueName: \"kubernetes.io/projected/a9ea5a73-2c6f-4004-b453-93af566cf163-kube-api-access-jqwv7\") on node \"crc\" DevicePath \"\"" Nov 21 15:10:36 crc kubenswrapper[4774]: I1121 15:10:36.024386 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ea5a73-2c6f-4004-b453-93af566cf163-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:10:36 crc kubenswrapper[4774]: I1121 15:10:36.714206 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jrjtb" event={"ID":"a9ea5a73-2c6f-4004-b453-93af566cf163","Type":"ContainerDied","Data":"6b9e83c5258c6a5bc415878f637ca7c0ec58435b2df74d657e5ad9ec4a1cce94"} Nov 21 15:10:36 crc kubenswrapper[4774]: I1121 15:10:36.714266 4774 scope.go:117] "RemoveContainer" containerID="2d0792b60d05ca66fdcdfec0ae24edc8ff2f0995439150bb5e3d74ba29272419" Nov 21 15:10:36 crc kubenswrapper[4774]: I1121 15:10:36.714265 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jrjtb" Nov 21 15:10:36 crc kubenswrapper[4774]: I1121 15:10:36.734197 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jrjtb"] Nov 21 15:10:36 crc kubenswrapper[4774]: I1121 15:10:36.740024 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jrjtb"] Nov 21 15:10:36 crc kubenswrapper[4774]: I1121 15:10:36.744995 4774 scope.go:117] "RemoveContainer" containerID="1807e5214eef578f00c099a67d0ef3756528f2fdb445fcb62fa247ce8dcd120e" Nov 21 15:10:36 crc kubenswrapper[4774]: I1121 15:10:36.762262 4774 scope.go:117] "RemoveContainer" containerID="ef65b4ce6927b44147c26feca47f163e281908a85039d9a868438773af6295f1" Nov 21 15:10:38 crc kubenswrapper[4774]: I1121 15:10:38.093896 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:10:38 crc kubenswrapper[4774]: I1121 15:10:38.106833 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9ea5a73-2c6f-4004-b453-93af566cf163" path="/var/lib/kubelet/pods/a9ea5a73-2c6f-4004-b453-93af566cf163/volumes" Nov 21 15:10:38 crc kubenswrapper[4774]: I1121 15:10:38.749173 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"2c79599e6fdda7a7887496d81a1ed1c3dda2c8683c0fe948188f52190bef919a"} Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.653893 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-v8krs"] Nov 21 15:12:10 crc kubenswrapper[4774]: E1121 15:12:10.654927 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ea5a73-2c6f-4004-b453-93af566cf163" containerName="registry-server" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.654943 4774 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a9ea5a73-2c6f-4004-b453-93af566cf163" containerName="registry-server" Nov 21 15:12:10 crc kubenswrapper[4774]: E1121 15:12:10.654971 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ea5a73-2c6f-4004-b453-93af566cf163" containerName="extract-utilities" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.654980 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ea5a73-2c6f-4004-b453-93af566cf163" containerName="extract-utilities" Nov 21 15:12:10 crc kubenswrapper[4774]: E1121 15:12:10.654998 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ea5a73-2c6f-4004-b453-93af566cf163" containerName="extract-content" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.655006 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ea5a73-2c6f-4004-b453-93af566cf163" containerName="extract-content" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.655204 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9ea5a73-2c6f-4004-b453-93af566cf163" containerName="registry-server" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.657027 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.664020 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v8krs"] Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.756923 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33fa9812-9dd5-447c-935d-accb20dcc416-utilities\") pod \"community-operators-v8krs\" (UID: \"33fa9812-9dd5-447c-935d-accb20dcc416\") " pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.757035 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v42j\" (UniqueName: \"kubernetes.io/projected/33fa9812-9dd5-447c-935d-accb20dcc416-kube-api-access-4v42j\") pod \"community-operators-v8krs\" (UID: \"33fa9812-9dd5-447c-935d-accb20dcc416\") " pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.757113 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33fa9812-9dd5-447c-935d-accb20dcc416-catalog-content\") pod \"community-operators-v8krs\" (UID: \"33fa9812-9dd5-447c-935d-accb20dcc416\") " pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.858728 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v42j\" (UniqueName: \"kubernetes.io/projected/33fa9812-9dd5-447c-935d-accb20dcc416-kube-api-access-4v42j\") pod \"community-operators-v8krs\" (UID: \"33fa9812-9dd5-447c-935d-accb20dcc416\") " pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.858842 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33fa9812-9dd5-447c-935d-accb20dcc416-catalog-content\") pod \"community-operators-v8krs\" (UID: \"33fa9812-9dd5-447c-935d-accb20dcc416\") " pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 
15:12:10.858894 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33fa9812-9dd5-447c-935d-accb20dcc416-utilities\") pod \"community-operators-v8krs\" (UID: \"33fa9812-9dd5-447c-935d-accb20dcc416\") " pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.859312 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33fa9812-9dd5-447c-935d-accb20dcc416-catalog-content\") pod \"community-operators-v8krs\" (UID: \"33fa9812-9dd5-447c-935d-accb20dcc416\") " pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.859383 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33fa9812-9dd5-447c-935d-accb20dcc416-utilities\") pod \"community-operators-v8krs\" (UID: \"33fa9812-9dd5-447c-935d-accb20dcc416\") " pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.882110 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v42j\" (UniqueName: \"kubernetes.io/projected/33fa9812-9dd5-447c-935d-accb20dcc416-kube-api-access-4v42j\") pod \"community-operators-v8krs\" (UID: \"33fa9812-9dd5-447c-935d-accb20dcc416\") " pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:10 crc kubenswrapper[4774]: I1121 15:12:10.983867 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:11 crc kubenswrapper[4774]: I1121 15:12:11.223186 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-v8krs"] Nov 21 15:12:11 crc kubenswrapper[4774]: I1121 15:12:11.433713 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8krs" event={"ID":"33fa9812-9dd5-447c-935d-accb20dcc416","Type":"ContainerStarted","Data":"b8d322a374c183b17db8a9436d2d252d371173d8cd378ebf6954293d4eb7b281"} Nov 21 15:12:11 crc kubenswrapper[4774]: I1121 15:12:11.434247 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8krs" event={"ID":"33fa9812-9dd5-447c-935d-accb20dcc416","Type":"ContainerStarted","Data":"ae32593e4df99d3b4c890f0966d77d8cd2373a692fcfc6547979678049ed7910"} Nov 21 15:12:12 crc kubenswrapper[4774]: I1121 15:12:12.442335 4774 generic.go:334] "Generic (PLEG): container finished" podID="33fa9812-9dd5-447c-935d-accb20dcc416" containerID="b8d322a374c183b17db8a9436d2d252d371173d8cd378ebf6954293d4eb7b281" exitCode=0 Nov 21 15:12:12 crc kubenswrapper[4774]: I1121 15:12:12.442408 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8krs" event={"ID":"33fa9812-9dd5-447c-935d-accb20dcc416","Type":"ContainerDied","Data":"b8d322a374c183b17db8a9436d2d252d371173d8cd378ebf6954293d4eb7b281"} Nov 21 15:12:14 crc kubenswrapper[4774]: I1121 15:12:14.460099 4774 generic.go:334] "Generic (PLEG): container finished" podID="33fa9812-9dd5-447c-935d-accb20dcc416" containerID="3b44d1ff3b5d9b97dd44d53a5c44abf5634214a5c9882f04b02b7b9fc6e0667a" exitCode=0 Nov 21 15:12:14 crc kubenswrapper[4774]: I1121 15:12:14.460284 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8krs" 
event={"ID":"33fa9812-9dd5-447c-935d-accb20dcc416","Type":"ContainerDied","Data":"3b44d1ff3b5d9b97dd44d53a5c44abf5634214a5c9882f04b02b7b9fc6e0667a"} Nov 21 15:12:15 crc kubenswrapper[4774]: I1121 15:12:15.468473 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8krs" event={"ID":"33fa9812-9dd5-447c-935d-accb20dcc416","Type":"ContainerStarted","Data":"7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d"} Nov 21 15:12:20 crc kubenswrapper[4774]: I1121 15:12:20.984343 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:20 crc kubenswrapper[4774]: I1121 15:12:20.984948 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:21 crc kubenswrapper[4774]: I1121 15:12:21.028494 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:21 crc kubenswrapper[4774]: I1121 15:12:21.047981 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-v8krs" podStartSLOduration=8.5514483 podStartE2EDuration="11.047960576s" podCreationTimestamp="2025-11-21 15:12:10 +0000 UTC" firstStartedPulling="2025-11-21 15:12:12.444572521 +0000 UTC m=+4123.096771780" lastFinishedPulling="2025-11-21 15:12:14.941084797 +0000 UTC m=+4125.593284056" observedRunningTime="2025-11-21 15:12:15.495499709 +0000 UTC m=+4126.147698968" watchObservedRunningTime="2025-11-21 15:12:21.047960576 +0000 UTC m=+4131.700159835" Nov 21 15:12:21 crc kubenswrapper[4774]: I1121 15:12:21.560398 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:21 crc kubenswrapper[4774]: I1121 15:12:21.605884 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v8krs"] Nov 21 15:12:23 crc kubenswrapper[4774]: I1121 15:12:23.522912 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-v8krs" podUID="33fa9812-9dd5-447c-935d-accb20dcc416" containerName="registry-server" containerID="cri-o://7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d" gracePeriod=2 Nov 21 15:12:23 crc kubenswrapper[4774]: I1121 15:12:23.908868 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.058135 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33fa9812-9dd5-447c-935d-accb20dcc416-utilities\") pod \"33fa9812-9dd5-447c-935d-accb20dcc416\" (UID: \"33fa9812-9dd5-447c-935d-accb20dcc416\") " Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.058908 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4v42j\" (UniqueName: \"kubernetes.io/projected/33fa9812-9dd5-447c-935d-accb20dcc416-kube-api-access-4v42j\") pod \"33fa9812-9dd5-447c-935d-accb20dcc416\" (UID: \"33fa9812-9dd5-447c-935d-accb20dcc416\") " Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.059008 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33fa9812-9dd5-447c-935d-accb20dcc416-catalog-content\") pod \"33fa9812-9dd5-447c-935d-accb20dcc416\" (UID: \"33fa9812-9dd5-447c-935d-accb20dcc416\") " Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.059627 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33fa9812-9dd5-447c-935d-accb20dcc416-utilities" (OuterVolumeSpecName: "utilities") pod "33fa9812-9dd5-447c-935d-accb20dcc416" (UID: "33fa9812-9dd5-447c-935d-accb20dcc416"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.070184 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33fa9812-9dd5-447c-935d-accb20dcc416-kube-api-access-4v42j" (OuterVolumeSpecName: "kube-api-access-4v42j") pod "33fa9812-9dd5-447c-935d-accb20dcc416" (UID: "33fa9812-9dd5-447c-935d-accb20dcc416"). InnerVolumeSpecName "kube-api-access-4v42j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.128221 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33fa9812-9dd5-447c-935d-accb20dcc416-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "33fa9812-9dd5-447c-935d-accb20dcc416" (UID: "33fa9812-9dd5-447c-935d-accb20dcc416"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.160997 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33fa9812-9dd5-447c-935d-accb20dcc416-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.161034 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33fa9812-9dd5-447c-935d-accb20dcc416-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.161044 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4v42j\" (UniqueName: \"kubernetes.io/projected/33fa9812-9dd5-447c-935d-accb20dcc416-kube-api-access-4v42j\") on node \"crc\" DevicePath \"\"" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.535177 4774 generic.go:334] "Generic (PLEG): container finished" podID="33fa9812-9dd5-447c-935d-accb20dcc416" containerID="7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d" exitCode=0 Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.535242 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8krs" event={"ID":"33fa9812-9dd5-447c-935d-accb20dcc416","Type":"ContainerDied","Data":"7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d"} Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.535298 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-v8krs" event={"ID":"33fa9812-9dd5-447c-935d-accb20dcc416","Type":"ContainerDied","Data":"ae32593e4df99d3b4c890f0966d77d8cd2373a692fcfc6547979678049ed7910"} Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.535313 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-v8krs" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.535321 4774 scope.go:117] "RemoveContainer" containerID="7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.564288 4774 scope.go:117] "RemoveContainer" containerID="3b44d1ff3b5d9b97dd44d53a5c44abf5634214a5c9882f04b02b7b9fc6e0667a" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.580614 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-v8krs"] Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.589610 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-v8krs"] Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.617654 4774 scope.go:117] "RemoveContainer" containerID="b8d322a374c183b17db8a9436d2d252d371173d8cd378ebf6954293d4eb7b281" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.654039 4774 scope.go:117] "RemoveContainer" containerID="7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d" Nov 21 15:12:24 crc kubenswrapper[4774]: E1121 15:12:24.654875 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d\": container with ID starting with 7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d not found: ID does not exist" containerID="7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.654964 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d"} err="failed to get container status \"7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d\": rpc error: code = NotFound desc = could not find container \"7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d\": container with ID starting with 7637cd324982c2761ced5dc4620ee7cc3a0e4f90905b8c4e854d3608fab51a3d not found: ID does not exist" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.655014 4774 scope.go:117] "RemoveContainer" containerID="3b44d1ff3b5d9b97dd44d53a5c44abf5634214a5c9882f04b02b7b9fc6e0667a" Nov 21 15:12:24 crc kubenswrapper[4774]: E1121 15:12:24.655441 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b44d1ff3b5d9b97dd44d53a5c44abf5634214a5c9882f04b02b7b9fc6e0667a\": container with ID starting with 3b44d1ff3b5d9b97dd44d53a5c44abf5634214a5c9882f04b02b7b9fc6e0667a not found: ID does not exist" containerID="3b44d1ff3b5d9b97dd44d53a5c44abf5634214a5c9882f04b02b7b9fc6e0667a" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.655481 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b44d1ff3b5d9b97dd44d53a5c44abf5634214a5c9882f04b02b7b9fc6e0667a"} err="failed to get container status \"3b44d1ff3b5d9b97dd44d53a5c44abf5634214a5c9882f04b02b7b9fc6e0667a\": rpc error: code = NotFound desc = could not find container \"3b44d1ff3b5d9b97dd44d53a5c44abf5634214a5c9882f04b02b7b9fc6e0667a\": container with ID starting with 3b44d1ff3b5d9b97dd44d53a5c44abf5634214a5c9882f04b02b7b9fc6e0667a not found: ID does not exist" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.655496 4774 scope.go:117] "RemoveContainer" 
containerID="b8d322a374c183b17db8a9436d2d252d371173d8cd378ebf6954293d4eb7b281" Nov 21 15:12:24 crc kubenswrapper[4774]: E1121 15:12:24.655686 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8d322a374c183b17db8a9436d2d252d371173d8cd378ebf6954293d4eb7b281\": container with ID starting with b8d322a374c183b17db8a9436d2d252d371173d8cd378ebf6954293d4eb7b281 not found: ID does not exist" containerID="b8d322a374c183b17db8a9436d2d252d371173d8cd378ebf6954293d4eb7b281" Nov 21 15:12:24 crc kubenswrapper[4774]: I1121 15:12:24.655718 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8d322a374c183b17db8a9436d2d252d371173d8cd378ebf6954293d4eb7b281"} err="failed to get container status \"b8d322a374c183b17db8a9436d2d252d371173d8cd378ebf6954293d4eb7b281\": rpc error: code = NotFound desc = could not find container \"b8d322a374c183b17db8a9436d2d252d371173d8cd378ebf6954293d4eb7b281\": container with ID starting with b8d322a374c183b17db8a9436d2d252d371173d8cd378ebf6954293d4eb7b281 not found: ID does not exist" Nov 21 15:12:26 crc kubenswrapper[4774]: I1121 15:12:26.105032 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33fa9812-9dd5-447c-935d-accb20dcc416" path="/var/lib/kubelet/pods/33fa9812-9dd5-447c-935d-accb20dcc416/volumes" Nov 21 15:12:59 crc kubenswrapper[4774]: I1121 15:12:59.601217 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:12:59 crc kubenswrapper[4774]: I1121 15:12:59.601711 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:13:29 crc kubenswrapper[4774]: I1121 15:13:29.601111 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:13:29 crc kubenswrapper[4774]: I1121 15:13:29.601792 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:13:59 crc kubenswrapper[4774]: I1121 15:13:59.600664 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:13:59 crc kubenswrapper[4774]: I1121 15:13:59.601375 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:13:59 crc kubenswrapper[4774]: I1121 15:13:59.601447 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 15:13:59 crc kubenswrapper[4774]: I1121 15:13:59.602456 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2c79599e6fdda7a7887496d81a1ed1c3dda2c8683c0fe948188f52190bef919a"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 15:13:59 crc kubenswrapper[4774]: I1121 15:13:59.602581 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://2c79599e6fdda7a7887496d81a1ed1c3dda2c8683c0fe948188f52190bef919a" gracePeriod=600 Nov 21 15:14:00 crc kubenswrapper[4774]: I1121 15:14:00.272211 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="2c79599e6fdda7a7887496d81a1ed1c3dda2c8683c0fe948188f52190bef919a" exitCode=0 Nov 21 15:14:00 crc kubenswrapper[4774]: I1121 15:14:00.272455 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"2c79599e6fdda7a7887496d81a1ed1c3dda2c8683c0fe948188f52190bef919a"} Nov 21 15:14:00 crc kubenswrapper[4774]: I1121 15:14:00.272657 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd"} Nov 21 15:14:00 crc kubenswrapper[4774]: I1121 15:14:00.272697 4774 scope.go:117] "RemoveContainer" containerID="df9565245b542d73c2595b7728229e1d6305836d7bd0564bc268fbc031b92a6b" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.153915 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9"] Nov 21 15:15:00 crc kubenswrapper[4774]: E1121 15:15:00.154631 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33fa9812-9dd5-447c-935d-accb20dcc416" containerName="extract-content" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.154644 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="33fa9812-9dd5-447c-935d-accb20dcc416" containerName="extract-content" Nov 21 15:15:00 crc kubenswrapper[4774]: E1121 15:15:00.154654 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33fa9812-9dd5-447c-935d-accb20dcc416" containerName="registry-server" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.154661 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="33fa9812-9dd5-447c-935d-accb20dcc416" containerName="registry-server" Nov 21 15:15:00 crc kubenswrapper[4774]: E1121 15:15:00.154676 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33fa9812-9dd5-447c-935d-accb20dcc416" containerName="extract-utilities" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.154683 4774 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="33fa9812-9dd5-447c-935d-accb20dcc416" containerName="extract-utilities" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.155247 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="33fa9812-9dd5-447c-935d-accb20dcc416" containerName="registry-server" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.155733 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.162306 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.163038 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.166228 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9"] Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.336856 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6a747b3c-1ce8-435b-81de-2736a1a28e60-config-volume\") pod \"collect-profiles-29395635-hxld9\" (UID: \"6a747b3c-1ce8-435b-81de-2736a1a28e60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.336916 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcg2d\" (UniqueName: \"kubernetes.io/projected/6a747b3c-1ce8-435b-81de-2736a1a28e60-kube-api-access-gcg2d\") pod \"collect-profiles-29395635-hxld9\" (UID: \"6a747b3c-1ce8-435b-81de-2736a1a28e60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.337160 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6a747b3c-1ce8-435b-81de-2736a1a28e60-secret-volume\") pod \"collect-profiles-29395635-hxld9\" (UID: \"6a747b3c-1ce8-435b-81de-2736a1a28e60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.438038 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6a747b3c-1ce8-435b-81de-2736a1a28e60-secret-volume\") pod \"collect-profiles-29395635-hxld9\" (UID: \"6a747b3c-1ce8-435b-81de-2736a1a28e60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.438144 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6a747b3c-1ce8-435b-81de-2736a1a28e60-config-volume\") pod \"collect-profiles-29395635-hxld9\" (UID: \"6a747b3c-1ce8-435b-81de-2736a1a28e60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.438185 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcg2d\" (UniqueName: \"kubernetes.io/projected/6a747b3c-1ce8-435b-81de-2736a1a28e60-kube-api-access-gcg2d\") pod 
\"collect-profiles-29395635-hxld9\" (UID: \"6a747b3c-1ce8-435b-81de-2736a1a28e60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.439553 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6a747b3c-1ce8-435b-81de-2736a1a28e60-config-volume\") pod \"collect-profiles-29395635-hxld9\" (UID: \"6a747b3c-1ce8-435b-81de-2736a1a28e60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.453309 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6a747b3c-1ce8-435b-81de-2736a1a28e60-secret-volume\") pod \"collect-profiles-29395635-hxld9\" (UID: \"6a747b3c-1ce8-435b-81de-2736a1a28e60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.460089 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcg2d\" (UniqueName: \"kubernetes.io/projected/6a747b3c-1ce8-435b-81de-2736a1a28e60-kube-api-access-gcg2d\") pod \"collect-profiles-29395635-hxld9\" (UID: \"6a747b3c-1ce8-435b-81de-2736a1a28e60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.476859 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:00 crc kubenswrapper[4774]: I1121 15:15:00.956899 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9"] Nov 21 15:15:01 crc kubenswrapper[4774]: I1121 15:15:01.757510 4774 generic.go:334] "Generic (PLEG): container finished" podID="6a747b3c-1ce8-435b-81de-2736a1a28e60" containerID="7981b92e066d00b528d21b9f3f976c8aa87869ee7f050b30aeee71d3d6b3e8f9" exitCode=0 Nov 21 15:15:01 crc kubenswrapper[4774]: I1121 15:15:01.757611 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" event={"ID":"6a747b3c-1ce8-435b-81de-2736a1a28e60","Type":"ContainerDied","Data":"7981b92e066d00b528d21b9f3f976c8aa87869ee7f050b30aeee71d3d6b3e8f9"} Nov 21 15:15:01 crc kubenswrapper[4774]: I1121 15:15:01.757809 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" event={"ID":"6a747b3c-1ce8-435b-81de-2736a1a28e60","Type":"ContainerStarted","Data":"3bd1b774a18069b1580827df984e3f55a8a7a8be87e2ca9a0a6b843c243c41f6"} Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.020479 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.182730 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6a747b3c-1ce8-435b-81de-2736a1a28e60-secret-volume\") pod \"6a747b3c-1ce8-435b-81de-2736a1a28e60\" (UID: \"6a747b3c-1ce8-435b-81de-2736a1a28e60\") " Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.182788 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6a747b3c-1ce8-435b-81de-2736a1a28e60-config-volume\") pod \"6a747b3c-1ce8-435b-81de-2736a1a28e60\" (UID: \"6a747b3c-1ce8-435b-81de-2736a1a28e60\") " Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.182874 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcg2d\" (UniqueName: \"kubernetes.io/projected/6a747b3c-1ce8-435b-81de-2736a1a28e60-kube-api-access-gcg2d\") pod \"6a747b3c-1ce8-435b-81de-2736a1a28e60\" (UID: \"6a747b3c-1ce8-435b-81de-2736a1a28e60\") " Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.183631 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a747b3c-1ce8-435b-81de-2736a1a28e60-config-volume" (OuterVolumeSpecName: "config-volume") pod "6a747b3c-1ce8-435b-81de-2736a1a28e60" (UID: "6a747b3c-1ce8-435b-81de-2736a1a28e60"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.188573 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a747b3c-1ce8-435b-81de-2736a1a28e60-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6a747b3c-1ce8-435b-81de-2736a1a28e60" (UID: "6a747b3c-1ce8-435b-81de-2736a1a28e60"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.189975 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a747b3c-1ce8-435b-81de-2736a1a28e60-kube-api-access-gcg2d" (OuterVolumeSpecName: "kube-api-access-gcg2d") pod "6a747b3c-1ce8-435b-81de-2736a1a28e60" (UID: "6a747b3c-1ce8-435b-81de-2736a1a28e60"). InnerVolumeSpecName "kube-api-access-gcg2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.285083 4774 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6a747b3c-1ce8-435b-81de-2736a1a28e60-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.285124 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6a747b3c-1ce8-435b-81de-2736a1a28e60-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.285139 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcg2d\" (UniqueName: \"kubernetes.io/projected/6a747b3c-1ce8-435b-81de-2736a1a28e60-kube-api-access-gcg2d\") on node \"crc\" DevicePath \"\"" Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.777028 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.777042 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9" event={"ID":"6a747b3c-1ce8-435b-81de-2736a1a28e60","Type":"ContainerDied","Data":"3bd1b774a18069b1580827df984e3f55a8a7a8be87e2ca9a0a6b843c243c41f6"} Nov 21 15:15:03 crc kubenswrapper[4774]: I1121 15:15:03.777115 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bd1b774a18069b1580827df984e3f55a8a7a8be87e2ca9a0a6b843c243c41f6" Nov 21 15:15:04 crc kubenswrapper[4774]: I1121 15:15:04.088805 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln"] Nov 21 15:15:04 crc kubenswrapper[4774]: I1121 15:15:04.103657 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395590-tknln"] Nov 21 15:15:06 crc kubenswrapper[4774]: I1121 15:15:06.102231 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7012a04a-12f8-44bd-9ab4-d67fa60f12b3" path="/var/lib/kubelet/pods/7012a04a-12f8-44bd-9ab4-d67fa60f12b3/volumes" Nov 21 15:15:39 crc kubenswrapper[4774]: I1121 15:15:39.439811 4774 scope.go:117] "RemoveContainer" containerID="1f679e4e0fc10db92ee9f76e55737746c2b7a1ec82ace36ee8395ac3871079c2" Nov 21 15:15:59 crc kubenswrapper[4774]: I1121 15:15:59.601317 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:15:59 crc kubenswrapper[4774]: I1121 15:15:59.601962 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:16:29 crc kubenswrapper[4774]: I1121 15:16:29.600797 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:16:29 crc kubenswrapper[4774]: I1121 15:16:29.601384 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:16:59 crc kubenswrapper[4774]: I1121 15:16:59.601011 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:16:59 crc kubenswrapper[4774]: I1121 15:16:59.602127 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" 
podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:16:59 crc kubenswrapper[4774]: I1121 15:16:59.602209 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 15:16:59 crc kubenswrapper[4774]: I1121 15:16:59.603391 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 15:16:59 crc kubenswrapper[4774]: I1121 15:16:59.603492 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" gracePeriod=600 Nov 21 15:16:59 crc kubenswrapper[4774]: E1121 15:16:59.785686 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:17:00 crc kubenswrapper[4774]: I1121 15:17:00.636942 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" exitCode=0 Nov 21 15:17:00 crc kubenswrapper[4774]: I1121 15:17:00.637001 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd"} Nov 21 15:17:00 crc kubenswrapper[4774]: I1121 15:17:00.637037 4774 scope.go:117] "RemoveContainer" containerID="2c79599e6fdda7a7887496d81a1ed1c3dda2c8683c0fe948188f52190bef919a" Nov 21 15:17:00 crc kubenswrapper[4774]: I1121 15:17:00.638501 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:17:00 crc kubenswrapper[4774]: E1121 15:17:00.639187 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:17:14 crc kubenswrapper[4774]: I1121 15:17:14.092528 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:17:14 crc kubenswrapper[4774]: E1121 15:17:14.093068 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:17:28 crc kubenswrapper[4774]: I1121 15:17:28.092876 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:17:28 crc kubenswrapper[4774]: E1121 15:17:28.093629 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:17:42 crc kubenswrapper[4774]: I1121 15:17:42.094239 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:17:42 crc kubenswrapper[4774]: E1121 15:17:42.095342 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:17:54 crc kubenswrapper[4774]: I1121 15:17:54.092772 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:17:54 crc kubenswrapper[4774]: E1121 15:17:54.093319 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:18:08 crc kubenswrapper[4774]: I1121 15:18:08.092600 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:18:08 crc kubenswrapper[4774]: E1121 15:18:08.093412 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:18:19 crc kubenswrapper[4774]: I1121 15:18:19.092633 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:18:19 crc kubenswrapper[4774]: E1121 15:18:19.093231 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.170812 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rv6qh"] Nov 21 15:18:26 crc kubenswrapper[4774]: E1121 15:18:26.171374 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a747b3c-1ce8-435b-81de-2736a1a28e60" containerName="collect-profiles" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.171558 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a747b3c-1ce8-435b-81de-2736a1a28e60" containerName="collect-profiles" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.171692 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a747b3c-1ce8-435b-81de-2736a1a28e60" containerName="collect-profiles" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.172634 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.189220 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rv6qh"] Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.344428 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5qp5\" (UniqueName: \"kubernetes.io/projected/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-kube-api-access-z5qp5\") pod \"certified-operators-rv6qh\" (UID: \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\") " pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.344476 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-utilities\") pod \"certified-operators-rv6qh\" (UID: \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\") " pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.344560 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-catalog-content\") pod \"certified-operators-rv6qh\" (UID: \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\") " pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.447612 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5qp5\" (UniqueName: \"kubernetes.io/projected/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-kube-api-access-z5qp5\") pod \"certified-operators-rv6qh\" (UID: \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\") " pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.447702 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-utilities\") pod \"certified-operators-rv6qh\" (UID: \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\") " pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.447905 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-catalog-content\") 
pod \"certified-operators-rv6qh\" (UID: \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\") " pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.449241 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-catalog-content\") pod \"certified-operators-rv6qh\" (UID: \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\") " pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.449944 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-utilities\") pod \"certified-operators-rv6qh\" (UID: \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\") " pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.488119 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5qp5\" (UniqueName: \"kubernetes.io/projected/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-kube-api-access-z5qp5\") pod \"certified-operators-rv6qh\" (UID: \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\") " pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:26 crc kubenswrapper[4774]: I1121 15:18:26.788783 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:27 crc kubenswrapper[4774]: I1121 15:18:27.218027 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rv6qh"] Nov 21 15:18:28 crc kubenswrapper[4774]: I1121 15:18:28.245749 4774 generic.go:334] "Generic (PLEG): container finished" podID="82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" containerID="6c61602c385e131e3aff0feb7532031b525b6d21f03cfdac9e32ec335b56416f" exitCode=0 Nov 21 15:18:28 crc kubenswrapper[4774]: I1121 15:18:28.245893 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv6qh" event={"ID":"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42","Type":"ContainerDied","Data":"6c61602c385e131e3aff0feb7532031b525b6d21f03cfdac9e32ec335b56416f"} Nov 21 15:18:28 crc kubenswrapper[4774]: I1121 15:18:28.247605 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv6qh" event={"ID":"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42","Type":"ContainerStarted","Data":"19ef9f8589fbac4322acc1e8b8dbe1d22eabfcaac53ba158bad98390ade7d3a4"} Nov 21 15:18:28 crc kubenswrapper[4774]: I1121 15:18:28.248893 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 15:18:29 crc kubenswrapper[4774]: I1121 15:18:29.256072 4774 generic.go:334] "Generic (PLEG): container finished" podID="82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" containerID="f63d87136a5515dcd5fb44418c8e03d2519c440539741afbc8eb6b3822c9afb4" exitCode=0 Nov 21 15:18:29 crc kubenswrapper[4774]: I1121 15:18:29.256137 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv6qh" event={"ID":"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42","Type":"ContainerDied","Data":"f63d87136a5515dcd5fb44418c8e03d2519c440539741afbc8eb6b3822c9afb4"} Nov 21 15:18:30 crc kubenswrapper[4774]: I1121 15:18:30.266930 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv6qh" 
event={"ID":"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42","Type":"ContainerStarted","Data":"4a1c86f8bd7774926d34cf896373e4ea9df57bbb17ed6cdee1fe88f35be706e0"} Nov 21 15:18:30 crc kubenswrapper[4774]: I1121 15:18:30.285165 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rv6qh" podStartSLOduration=2.8795101929999998 podStartE2EDuration="4.285143781s" podCreationTimestamp="2025-11-21 15:18:26 +0000 UTC" firstStartedPulling="2025-11-21 15:18:28.248577466 +0000 UTC m=+4498.900776725" lastFinishedPulling="2025-11-21 15:18:29.654211054 +0000 UTC m=+4500.306410313" observedRunningTime="2025-11-21 15:18:30.281744234 +0000 UTC m=+4500.933943513" watchObservedRunningTime="2025-11-21 15:18:30.285143781 +0000 UTC m=+4500.937343060" Nov 21 15:18:32 crc kubenswrapper[4774]: I1121 15:18:32.093299 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:18:32 crc kubenswrapper[4774]: E1121 15:18:32.094256 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:18:36 crc kubenswrapper[4774]: I1121 15:18:36.789706 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:36 crc kubenswrapper[4774]: I1121 15:18:36.790878 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:36 crc kubenswrapper[4774]: I1121 15:18:36.845936 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:37 crc kubenswrapper[4774]: I1121 15:18:37.369596 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:37 crc kubenswrapper[4774]: I1121 15:18:37.422528 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rv6qh"] Nov 21 15:18:39 crc kubenswrapper[4774]: I1121 15:18:39.334781 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rv6qh" podUID="82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" containerName="registry-server" containerID="cri-o://4a1c86f8bd7774926d34cf896373e4ea9df57bbb17ed6cdee1fe88f35be706e0" gracePeriod=2 Nov 21 15:18:40 crc kubenswrapper[4774]: I1121 15:18:40.345639 4774 generic.go:334] "Generic (PLEG): container finished" podID="82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" containerID="4a1c86f8bd7774926d34cf896373e4ea9df57bbb17ed6cdee1fe88f35be706e0" exitCode=0 Nov 21 15:18:40 crc kubenswrapper[4774]: I1121 15:18:40.345767 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv6qh" event={"ID":"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42","Type":"ContainerDied","Data":"4a1c86f8bd7774926d34cf896373e4ea9df57bbb17ed6cdee1fe88f35be706e0"} Nov 21 15:18:40 crc kubenswrapper[4774]: I1121 15:18:40.396393 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:40 crc kubenswrapper[4774]: I1121 15:18:40.562092 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-catalog-content\") pod \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\" (UID: \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\") " Nov 21 15:18:40 crc kubenswrapper[4774]: I1121 15:18:40.564098 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5qp5\" (UniqueName: \"kubernetes.io/projected/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-kube-api-access-z5qp5\") pod \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\" (UID: \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\") " Nov 21 15:18:40 crc kubenswrapper[4774]: I1121 15:18:40.564190 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-utilities\") pod \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\" (UID: \"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42\") " Nov 21 15:18:40 crc kubenswrapper[4774]: I1121 15:18:40.565305 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-utilities" (OuterVolumeSpecName: "utilities") pod "82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" (UID: "82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:18:40 crc kubenswrapper[4774]: I1121 15:18:40.569559 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-kube-api-access-z5qp5" (OuterVolumeSpecName: "kube-api-access-z5qp5") pod "82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" (UID: "82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42"). InnerVolumeSpecName "kube-api-access-z5qp5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:18:40 crc kubenswrapper[4774]: I1121 15:18:40.617045 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" (UID: "82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:18:40 crc kubenswrapper[4774]: I1121 15:18:40.665397 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:18:40 crc kubenswrapper[4774]: I1121 15:18:40.665428 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:18:40 crc kubenswrapper[4774]: I1121 15:18:40.665442 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5qp5\" (UniqueName: \"kubernetes.io/projected/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42-kube-api-access-z5qp5\") on node \"crc\" DevicePath \"\"" Nov 21 15:18:41 crc kubenswrapper[4774]: I1121 15:18:41.363835 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rv6qh" event={"ID":"82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42","Type":"ContainerDied","Data":"19ef9f8589fbac4322acc1e8b8dbe1d22eabfcaac53ba158bad98390ade7d3a4"} Nov 21 15:18:41 crc kubenswrapper[4774]: I1121 15:18:41.363922 4774 scope.go:117] "RemoveContainer" containerID="4a1c86f8bd7774926d34cf896373e4ea9df57bbb17ed6cdee1fe88f35be706e0" Nov 21 15:18:41 crc kubenswrapper[4774]: I1121 15:18:41.363864 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rv6qh" Nov 21 15:18:41 crc kubenswrapper[4774]: I1121 15:18:41.385575 4774 scope.go:117] "RemoveContainer" containerID="f63d87136a5515dcd5fb44418c8e03d2519c440539741afbc8eb6b3822c9afb4" Nov 21 15:18:41 crc kubenswrapper[4774]: I1121 15:18:41.399990 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rv6qh"] Nov 21 15:18:41 crc kubenswrapper[4774]: I1121 15:18:41.405383 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rv6qh"] Nov 21 15:18:41 crc kubenswrapper[4774]: I1121 15:18:41.420145 4774 scope.go:117] "RemoveContainer" containerID="6c61602c385e131e3aff0feb7532031b525b6d21f03cfdac9e32ec335b56416f" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.102414 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" path="/var/lib/kubelet/pods/82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42/volumes" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.640166 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8pgfz"] Nov 21 15:18:42 crc kubenswrapper[4774]: E1121 15:18:42.640565 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" containerName="extract-content" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.640582 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" containerName="extract-content" Nov 21 15:18:42 crc kubenswrapper[4774]: E1121 15:18:42.640600 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" containerName="registry-server" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.640608 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" containerName="registry-server" Nov 21 15:18:42 crc kubenswrapper[4774]: E1121 15:18:42.640627 4774 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" containerName="extract-utilities" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.640639 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" containerName="extract-utilities" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.640892 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="82f37dbd-a15e-4b9a-8f55-fd2cdf3a9b42" containerName="registry-server" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.642280 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.696396 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8pgfz"] Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.791402 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4773221-0919-4f64-b347-eeceabe758c8-catalog-content\") pod \"redhat-marketplace-8pgfz\" (UID: \"a4773221-0919-4f64-b347-eeceabe758c8\") " pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.791568 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrhg7\" (UniqueName: \"kubernetes.io/projected/a4773221-0919-4f64-b347-eeceabe758c8-kube-api-access-jrhg7\") pod \"redhat-marketplace-8pgfz\" (UID: \"a4773221-0919-4f64-b347-eeceabe758c8\") " pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.791610 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4773221-0919-4f64-b347-eeceabe758c8-utilities\") pod \"redhat-marketplace-8pgfz\" (UID: \"a4773221-0919-4f64-b347-eeceabe758c8\") " pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.893096 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4773221-0919-4f64-b347-eeceabe758c8-catalog-content\") pod \"redhat-marketplace-8pgfz\" (UID: \"a4773221-0919-4f64-b347-eeceabe758c8\") " pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.893193 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrhg7\" (UniqueName: \"kubernetes.io/projected/a4773221-0919-4f64-b347-eeceabe758c8-kube-api-access-jrhg7\") pod \"redhat-marketplace-8pgfz\" (UID: \"a4773221-0919-4f64-b347-eeceabe758c8\") " pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.893214 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4773221-0919-4f64-b347-eeceabe758c8-utilities\") pod \"redhat-marketplace-8pgfz\" (UID: \"a4773221-0919-4f64-b347-eeceabe758c8\") " pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.893797 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/a4773221-0919-4f64-b347-eeceabe758c8-utilities\") pod \"redhat-marketplace-8pgfz\" (UID: \"a4773221-0919-4f64-b347-eeceabe758c8\") " pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.893796 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4773221-0919-4f64-b347-eeceabe758c8-catalog-content\") pod \"redhat-marketplace-8pgfz\" (UID: \"a4773221-0919-4f64-b347-eeceabe758c8\") " pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.912884 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrhg7\" (UniqueName: \"kubernetes.io/projected/a4773221-0919-4f64-b347-eeceabe758c8-kube-api-access-jrhg7\") pod \"redhat-marketplace-8pgfz\" (UID: \"a4773221-0919-4f64-b347-eeceabe758c8\") " pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:42 crc kubenswrapper[4774]: I1121 15:18:42.960677 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:43 crc kubenswrapper[4774]: I1121 15:18:43.092811 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:18:43 crc kubenswrapper[4774]: E1121 15:18:43.093582 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:18:43 crc kubenswrapper[4774]: I1121 15:18:43.187060 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8pgfz"] Nov 21 15:18:43 crc kubenswrapper[4774]: I1121 15:18:43.380788 4774 generic.go:334] "Generic (PLEG): container finished" podID="a4773221-0919-4f64-b347-eeceabe758c8" containerID="5740858565b3d2c22876bc7c42fa15aa5ee8ce70c811ce0b95e995a164b774f5" exitCode=0 Nov 21 15:18:43 crc kubenswrapper[4774]: I1121 15:18:43.380849 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pgfz" event={"ID":"a4773221-0919-4f64-b347-eeceabe758c8","Type":"ContainerDied","Data":"5740858565b3d2c22876bc7c42fa15aa5ee8ce70c811ce0b95e995a164b774f5"} Nov 21 15:18:43 crc kubenswrapper[4774]: I1121 15:18:43.380877 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pgfz" event={"ID":"a4773221-0919-4f64-b347-eeceabe758c8","Type":"ContainerStarted","Data":"cf26e68dfe6e1e2e8ce245414bed3a69a137ba3a195a9f24ca59d211c797600f"} Nov 21 15:18:44 crc kubenswrapper[4774]: I1121 15:18:44.388190 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pgfz" event={"ID":"a4773221-0919-4f64-b347-eeceabe758c8","Type":"ContainerStarted","Data":"915231021c72a5c0c3db00ebde3464d15c15e18f8eb76b3b88b6c7179a3308c4"} Nov 21 15:18:45 crc kubenswrapper[4774]: I1121 15:18:45.396478 4774 generic.go:334] "Generic (PLEG): container finished" podID="a4773221-0919-4f64-b347-eeceabe758c8" containerID="915231021c72a5c0c3db00ebde3464d15c15e18f8eb76b3b88b6c7179a3308c4" exitCode=0 Nov 21 15:18:45 crc kubenswrapper[4774]: 
I1121 15:18:45.396519 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pgfz" event={"ID":"a4773221-0919-4f64-b347-eeceabe758c8","Type":"ContainerDied","Data":"915231021c72a5c0c3db00ebde3464d15c15e18f8eb76b3b88b6c7179a3308c4"} Nov 21 15:18:46 crc kubenswrapper[4774]: I1121 15:18:46.404739 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pgfz" event={"ID":"a4773221-0919-4f64-b347-eeceabe758c8","Type":"ContainerStarted","Data":"0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309"} Nov 21 15:18:52 crc kubenswrapper[4774]: I1121 15:18:52.961403 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:52 crc kubenswrapper[4774]: I1121 15:18:52.962288 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.003471 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.025500 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8pgfz" podStartSLOduration=8.597907132 podStartE2EDuration="11.025480428s" podCreationTimestamp="2025-11-21 15:18:42 +0000 UTC" firstStartedPulling="2025-11-21 15:18:43.382397616 +0000 UTC m=+4514.034596875" lastFinishedPulling="2025-11-21 15:18:45.809970912 +0000 UTC m=+4516.462170171" observedRunningTime="2025-11-21 15:18:46.429837793 +0000 UTC m=+4517.082037052" watchObservedRunningTime="2025-11-21 15:18:53.025480428 +0000 UTC m=+4523.677679687" Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.487674 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.533531 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8pgfz"] Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.841208 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-lkp8t"] Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.849864 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-lkp8t"] Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.971987 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-7ntmr"] Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.973220 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.975477 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.975710 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.975638 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.976213 4774 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-2vsdx" Nov 21 15:18:53 crc kubenswrapper[4774]: I1121 15:18:53.981231 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-7ntmr"] Nov 21 15:18:54 crc kubenswrapper[4774]: I1121 15:18:54.056143 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f61d9e12-c26b-47a9-8831-e63628e2e91b-node-mnt\") pod \"crc-storage-crc-7ntmr\" (UID: \"f61d9e12-c26b-47a9-8831-e63628e2e91b\") " pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:54 crc kubenswrapper[4774]: I1121 15:18:54.056201 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f61d9e12-c26b-47a9-8831-e63628e2e91b-crc-storage\") pod \"crc-storage-crc-7ntmr\" (UID: \"f61d9e12-c26b-47a9-8831-e63628e2e91b\") " pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:54 crc kubenswrapper[4774]: I1121 15:18:54.056380 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzcrv\" (UniqueName: \"kubernetes.io/projected/f61d9e12-c26b-47a9-8831-e63628e2e91b-kube-api-access-mzcrv\") pod \"crc-storage-crc-7ntmr\" (UID: \"f61d9e12-c26b-47a9-8831-e63628e2e91b\") " pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:54 crc kubenswrapper[4774]: I1121 15:18:54.100409 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6618c892-4191-4a4c-86bb-00750a4be8b7" path="/var/lib/kubelet/pods/6618c892-4191-4a4c-86bb-00750a4be8b7/volumes" Nov 21 15:18:54 crc kubenswrapper[4774]: I1121 15:18:54.158350 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f61d9e12-c26b-47a9-8831-e63628e2e91b-node-mnt\") pod \"crc-storage-crc-7ntmr\" (UID: \"f61d9e12-c26b-47a9-8831-e63628e2e91b\") " pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:54 crc kubenswrapper[4774]: I1121 15:18:54.158445 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f61d9e12-c26b-47a9-8831-e63628e2e91b-crc-storage\") pod \"crc-storage-crc-7ntmr\" (UID: \"f61d9e12-c26b-47a9-8831-e63628e2e91b\") " pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:54 crc kubenswrapper[4774]: I1121 15:18:54.158511 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzcrv\" (UniqueName: \"kubernetes.io/projected/f61d9e12-c26b-47a9-8831-e63628e2e91b-kube-api-access-mzcrv\") pod \"crc-storage-crc-7ntmr\" (UID: \"f61d9e12-c26b-47a9-8831-e63628e2e91b\") " pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:54 crc kubenswrapper[4774]: I1121 15:18:54.158701 4774 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f61d9e12-c26b-47a9-8831-e63628e2e91b-node-mnt\") pod \"crc-storage-crc-7ntmr\" (UID: \"f61d9e12-c26b-47a9-8831-e63628e2e91b\") " pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:54 crc kubenswrapper[4774]: I1121 15:18:54.159219 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f61d9e12-c26b-47a9-8831-e63628e2e91b-crc-storage\") pod \"crc-storage-crc-7ntmr\" (UID: \"f61d9e12-c26b-47a9-8831-e63628e2e91b\") " pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:54 crc kubenswrapper[4774]: I1121 15:18:54.177836 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzcrv\" (UniqueName: \"kubernetes.io/projected/f61d9e12-c26b-47a9-8831-e63628e2e91b-kube-api-access-mzcrv\") pod \"crc-storage-crc-7ntmr\" (UID: \"f61d9e12-c26b-47a9-8831-e63628e2e91b\") " pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:54 crc kubenswrapper[4774]: I1121 15:18:54.297101 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:54 crc kubenswrapper[4774]: I1121 15:18:54.742723 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-7ntmr"] Nov 21 15:18:54 crc kubenswrapper[4774]: W1121 15:18:54.748861 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf61d9e12_c26b_47a9_8831_e63628e2e91b.slice/crio-2eb442b64d6d3b3b28c20c86c31d812f813c0547e6bfe045960139e452caa67c WatchSource:0}: Error finding container 2eb442b64d6d3b3b28c20c86c31d812f813c0547e6bfe045960139e452caa67c: Status 404 returned error can't find the container with id 2eb442b64d6d3b3b28c20c86c31d812f813c0547e6bfe045960139e452caa67c Nov 21 15:18:55 crc kubenswrapper[4774]: I1121 15:18:55.463654 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-7ntmr" event={"ID":"f61d9e12-c26b-47a9-8831-e63628e2e91b","Type":"ContainerStarted","Data":"1ce1c59098c450c11143dc9e0076e0b56e688f99ed95cd349b890af6022ac765"} Nov 21 15:18:55 crc kubenswrapper[4774]: I1121 15:18:55.463938 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-7ntmr" event={"ID":"f61d9e12-c26b-47a9-8831-e63628e2e91b","Type":"ContainerStarted","Data":"2eb442b64d6d3b3b28c20c86c31d812f813c0547e6bfe045960139e452caa67c"} Nov 21 15:18:55 crc kubenswrapper[4774]: I1121 15:18:55.463898 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8pgfz" podUID="a4773221-0919-4f64-b347-eeceabe758c8" containerName="registry-server" containerID="cri-o://0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309" gracePeriod=2 Nov 21 15:18:55 crc kubenswrapper[4774]: I1121 15:18:55.487061 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="crc-storage/crc-storage-crc-7ntmr" podStartSLOduration=1.994174445 podStartE2EDuration="2.487039372s" podCreationTimestamp="2025-11-21 15:18:53 +0000 UTC" firstStartedPulling="2025-11-21 15:18:54.750787469 +0000 UTC m=+4525.402986718" lastFinishedPulling="2025-11-21 15:18:55.243652386 +0000 UTC m=+4525.895851645" observedRunningTime="2025-11-21 15:18:55.48100692 +0000 UTC m=+4526.133206179" watchObservedRunningTime="2025-11-21 15:18:55.487039372 +0000 UTC m=+4526.139238631" Nov 21 15:18:55 crc 
kubenswrapper[4774]: I1121 15:18:55.892937 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:55 crc kubenswrapper[4774]: I1121 15:18:55.996382 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4773221-0919-4f64-b347-eeceabe758c8-utilities\") pod \"a4773221-0919-4f64-b347-eeceabe758c8\" (UID: \"a4773221-0919-4f64-b347-eeceabe758c8\") " Nov 21 15:18:55 crc kubenswrapper[4774]: I1121 15:18:55.996542 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrhg7\" (UniqueName: \"kubernetes.io/projected/a4773221-0919-4f64-b347-eeceabe758c8-kube-api-access-jrhg7\") pod \"a4773221-0919-4f64-b347-eeceabe758c8\" (UID: \"a4773221-0919-4f64-b347-eeceabe758c8\") " Nov 21 15:18:55 crc kubenswrapper[4774]: I1121 15:18:55.996615 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4773221-0919-4f64-b347-eeceabe758c8-catalog-content\") pod \"a4773221-0919-4f64-b347-eeceabe758c8\" (UID: \"a4773221-0919-4f64-b347-eeceabe758c8\") " Nov 21 15:18:55 crc kubenswrapper[4774]: I1121 15:18:55.997523 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4773221-0919-4f64-b347-eeceabe758c8-utilities" (OuterVolumeSpecName: "utilities") pod "a4773221-0919-4f64-b347-eeceabe758c8" (UID: "a4773221-0919-4f64-b347-eeceabe758c8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.007090 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4773221-0919-4f64-b347-eeceabe758c8-kube-api-access-jrhg7" (OuterVolumeSpecName: "kube-api-access-jrhg7") pod "a4773221-0919-4f64-b347-eeceabe758c8" (UID: "a4773221-0919-4f64-b347-eeceabe758c8"). InnerVolumeSpecName "kube-api-access-jrhg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.014259 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4773221-0919-4f64-b347-eeceabe758c8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4773221-0919-4f64-b347-eeceabe758c8" (UID: "a4773221-0919-4f64-b347-eeceabe758c8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.097857 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrhg7\" (UniqueName: \"kubernetes.io/projected/a4773221-0919-4f64-b347-eeceabe758c8-kube-api-access-jrhg7\") on node \"crc\" DevicePath \"\"" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.097894 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4773221-0919-4f64-b347-eeceabe758c8-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.097904 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4773221-0919-4f64-b347-eeceabe758c8-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.472714 4774 generic.go:334] "Generic (PLEG): container finished" podID="a4773221-0919-4f64-b347-eeceabe758c8" containerID="0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309" exitCode=0 Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.472781 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pgfz" event={"ID":"a4773221-0919-4f64-b347-eeceabe758c8","Type":"ContainerDied","Data":"0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309"} Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.472834 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8pgfz" event={"ID":"a4773221-0919-4f64-b347-eeceabe758c8","Type":"ContainerDied","Data":"cf26e68dfe6e1e2e8ce245414bed3a69a137ba3a195a9f24ca59d211c797600f"} Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.472857 4774 scope.go:117] "RemoveContainer" containerID="0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.472987 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8pgfz" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.475615 4774 generic.go:334] "Generic (PLEG): container finished" podID="f61d9e12-c26b-47a9-8831-e63628e2e91b" containerID="1ce1c59098c450c11143dc9e0076e0b56e688f99ed95cd349b890af6022ac765" exitCode=0 Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.475651 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-7ntmr" event={"ID":"f61d9e12-c26b-47a9-8831-e63628e2e91b","Type":"ContainerDied","Data":"1ce1c59098c450c11143dc9e0076e0b56e688f99ed95cd349b890af6022ac765"} Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.504746 4774 scope.go:117] "RemoveContainer" containerID="915231021c72a5c0c3db00ebde3464d15c15e18f8eb76b3b88b6c7179a3308c4" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.512809 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8pgfz"] Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.518260 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8pgfz"] Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.529480 4774 scope.go:117] "RemoveContainer" containerID="5740858565b3d2c22876bc7c42fa15aa5ee8ce70c811ce0b95e995a164b774f5" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.549097 4774 scope.go:117] "RemoveContainer" containerID="0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309" Nov 21 15:18:56 crc kubenswrapper[4774]: E1121 15:18:56.549529 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309\": container with ID starting with 0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309 not found: ID does not exist" containerID="0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.549569 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309"} err="failed to get container status \"0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309\": rpc error: code = NotFound desc = could not find container \"0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309\": container with ID starting with 0439bed8198a52cd2d6e12fcb09fb81a0d88880a9a06fabe97f9f9861839b309 not found: ID does not exist" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.549596 4774 scope.go:117] "RemoveContainer" containerID="915231021c72a5c0c3db00ebde3464d15c15e18f8eb76b3b88b6c7179a3308c4" Nov 21 15:18:56 crc kubenswrapper[4774]: E1121 15:18:56.550119 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"915231021c72a5c0c3db00ebde3464d15c15e18f8eb76b3b88b6c7179a3308c4\": container with ID starting with 915231021c72a5c0c3db00ebde3464d15c15e18f8eb76b3b88b6c7179a3308c4 not found: ID does not exist" containerID="915231021c72a5c0c3db00ebde3464d15c15e18f8eb76b3b88b6c7179a3308c4" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.550145 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"915231021c72a5c0c3db00ebde3464d15c15e18f8eb76b3b88b6c7179a3308c4"} err="failed to get container status 
\"915231021c72a5c0c3db00ebde3464d15c15e18f8eb76b3b88b6c7179a3308c4\": rpc error: code = NotFound desc = could not find container \"915231021c72a5c0c3db00ebde3464d15c15e18f8eb76b3b88b6c7179a3308c4\": container with ID starting with 915231021c72a5c0c3db00ebde3464d15c15e18f8eb76b3b88b6c7179a3308c4 not found: ID does not exist" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.550158 4774 scope.go:117] "RemoveContainer" containerID="5740858565b3d2c22876bc7c42fa15aa5ee8ce70c811ce0b95e995a164b774f5" Nov 21 15:18:56 crc kubenswrapper[4774]: E1121 15:18:56.550442 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5740858565b3d2c22876bc7c42fa15aa5ee8ce70c811ce0b95e995a164b774f5\": container with ID starting with 5740858565b3d2c22876bc7c42fa15aa5ee8ce70c811ce0b95e995a164b774f5 not found: ID does not exist" containerID="5740858565b3d2c22876bc7c42fa15aa5ee8ce70c811ce0b95e995a164b774f5" Nov 21 15:18:56 crc kubenswrapper[4774]: I1121 15:18:56.550460 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5740858565b3d2c22876bc7c42fa15aa5ee8ce70c811ce0b95e995a164b774f5"} err="failed to get container status \"5740858565b3d2c22876bc7c42fa15aa5ee8ce70c811ce0b95e995a164b774f5\": rpc error: code = NotFound desc = could not find container \"5740858565b3d2c22876bc7c42fa15aa5ee8ce70c811ce0b95e995a164b774f5\": container with ID starting with 5740858565b3d2c22876bc7c42fa15aa5ee8ce70c811ce0b95e995a164b774f5 not found: ID does not exist" Nov 21 15:18:57 crc kubenswrapper[4774]: I1121 15:18:57.093918 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:18:57 crc kubenswrapper[4774]: E1121 15:18:57.094093 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:18:57 crc kubenswrapper[4774]: I1121 15:18:57.764147 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:57 crc kubenswrapper[4774]: I1121 15:18:57.920722 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f61d9e12-c26b-47a9-8831-e63628e2e91b-crc-storage\") pod \"f61d9e12-c26b-47a9-8831-e63628e2e91b\" (UID: \"f61d9e12-c26b-47a9-8831-e63628e2e91b\") " Nov 21 15:18:57 crc kubenswrapper[4774]: I1121 15:18:57.921308 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f61d9e12-c26b-47a9-8831-e63628e2e91b-node-mnt\") pod \"f61d9e12-c26b-47a9-8831-e63628e2e91b\" (UID: \"f61d9e12-c26b-47a9-8831-e63628e2e91b\") " Nov 21 15:18:57 crc kubenswrapper[4774]: I1121 15:18:57.921517 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzcrv\" (UniqueName: \"kubernetes.io/projected/f61d9e12-c26b-47a9-8831-e63628e2e91b-kube-api-access-mzcrv\") pod \"f61d9e12-c26b-47a9-8831-e63628e2e91b\" (UID: \"f61d9e12-c26b-47a9-8831-e63628e2e91b\") " Nov 21 15:18:57 crc kubenswrapper[4774]: I1121 15:18:57.921466 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f61d9e12-c26b-47a9-8831-e63628e2e91b-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "f61d9e12-c26b-47a9-8831-e63628e2e91b" (UID: "f61d9e12-c26b-47a9-8831-e63628e2e91b"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 15:18:57 crc kubenswrapper[4774]: I1121 15:18:57.923380 4774 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f61d9e12-c26b-47a9-8831-e63628e2e91b-node-mnt\") on node \"crc\" DevicePath \"\"" Nov 21 15:18:57 crc kubenswrapper[4774]: I1121 15:18:57.931166 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f61d9e12-c26b-47a9-8831-e63628e2e91b-kube-api-access-mzcrv" (OuterVolumeSpecName: "kube-api-access-mzcrv") pod "f61d9e12-c26b-47a9-8831-e63628e2e91b" (UID: "f61d9e12-c26b-47a9-8831-e63628e2e91b"). InnerVolumeSpecName "kube-api-access-mzcrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:18:57 crc kubenswrapper[4774]: I1121 15:18:57.942946 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f61d9e12-c26b-47a9-8831-e63628e2e91b-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "f61d9e12-c26b-47a9-8831-e63628e2e91b" (UID: "f61d9e12-c26b-47a9-8831-e63628e2e91b"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:18:58 crc kubenswrapper[4774]: I1121 15:18:58.024619 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzcrv\" (UniqueName: \"kubernetes.io/projected/f61d9e12-c26b-47a9-8831-e63628e2e91b-kube-api-access-mzcrv\") on node \"crc\" DevicePath \"\"" Nov 21 15:18:58 crc kubenswrapper[4774]: I1121 15:18:58.024648 4774 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f61d9e12-c26b-47a9-8831-e63628e2e91b-crc-storage\") on node \"crc\" DevicePath \"\"" Nov 21 15:18:58 crc kubenswrapper[4774]: I1121 15:18:58.103208 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4773221-0919-4f64-b347-eeceabe758c8" path="/var/lib/kubelet/pods/a4773221-0919-4f64-b347-eeceabe758c8/volumes" Nov 21 15:18:58 crc kubenswrapper[4774]: I1121 15:18:58.491167 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-7ntmr" event={"ID":"f61d9e12-c26b-47a9-8831-e63628e2e91b","Type":"ContainerDied","Data":"2eb442b64d6d3b3b28c20c86c31d812f813c0547e6bfe045960139e452caa67c"} Nov 21 15:18:58 crc kubenswrapper[4774]: I1121 15:18:58.491517 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2eb442b64d6d3b3b28c20c86c31d812f813c0547e6bfe045960139e452caa67c" Nov 21 15:18:58 crc kubenswrapper[4774]: I1121 15:18:58.491252 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-7ntmr" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.618933 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-7ntmr"] Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.628222 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-7ntmr"] Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.767996 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-kz4kv"] Nov 21 15:18:59 crc kubenswrapper[4774]: E1121 15:18:59.768274 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4773221-0919-4f64-b347-eeceabe758c8" containerName="extract-content" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.768286 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4773221-0919-4f64-b347-eeceabe758c8" containerName="extract-content" Nov 21 15:18:59 crc kubenswrapper[4774]: E1121 15:18:59.768314 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4773221-0919-4f64-b347-eeceabe758c8" containerName="extract-utilities" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.768321 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4773221-0919-4f64-b347-eeceabe758c8" containerName="extract-utilities" Nov 21 15:18:59 crc kubenswrapper[4774]: E1121 15:18:59.768331 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4773221-0919-4f64-b347-eeceabe758c8" containerName="registry-server" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.768337 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4773221-0919-4f64-b347-eeceabe758c8" containerName="registry-server" Nov 21 15:18:59 crc kubenswrapper[4774]: E1121 15:18:59.768350 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f61d9e12-c26b-47a9-8831-e63628e2e91b" containerName="storage" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.768356 4774 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f61d9e12-c26b-47a9-8831-e63628e2e91b" containerName="storage" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.768479 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f61d9e12-c26b-47a9-8831-e63628e2e91b" containerName="storage" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.768498 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4773221-0919-4f64-b347-eeceabe758c8" containerName="registry-server" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.768948 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.771213 4774 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-2vsdx" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.771432 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.771550 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.772336 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.785658 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-kz4kv"] Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.849486 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8svd\" (UniqueName: \"kubernetes.io/projected/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-kube-api-access-q8svd\") pod \"crc-storage-crc-kz4kv\" (UID: \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\") " pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.849636 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-node-mnt\") pod \"crc-storage-crc-kz4kv\" (UID: \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\") " pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.849778 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-crc-storage\") pod \"crc-storage-crc-kz4kv\" (UID: \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\") " pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.951049 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8svd\" (UniqueName: \"kubernetes.io/projected/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-kube-api-access-q8svd\") pod \"crc-storage-crc-kz4kv\" (UID: \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\") " pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.951199 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-node-mnt\") pod \"crc-storage-crc-kz4kv\" (UID: \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\") " pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.951276 4774 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-crc-storage\") pod \"crc-storage-crc-kz4kv\" (UID: \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\") " pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.951469 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-node-mnt\") pod \"crc-storage-crc-kz4kv\" (UID: \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\") " pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.952213 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-crc-storage\") pod \"crc-storage-crc-kz4kv\" (UID: \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\") " pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:18:59 crc kubenswrapper[4774]: I1121 15:18:59.971393 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8svd\" (UniqueName: \"kubernetes.io/projected/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-kube-api-access-q8svd\") pod \"crc-storage-crc-kz4kv\" (UID: \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\") " pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:19:00 crc kubenswrapper[4774]: I1121 15:19:00.081707 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:19:00 crc kubenswrapper[4774]: I1121 15:19:00.114854 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f61d9e12-c26b-47a9-8831-e63628e2e91b" path="/var/lib/kubelet/pods/f61d9e12-c26b-47a9-8831-e63628e2e91b/volumes" Nov 21 15:19:00 crc kubenswrapper[4774]: I1121 15:19:00.928578 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-kz4kv"] Nov 21 15:19:01 crc kubenswrapper[4774]: I1121 15:19:01.511830 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kz4kv" event={"ID":"9b0f91ea-c98d-40e5-a5df-f808e79f6c33","Type":"ContainerStarted","Data":"de5b2eff3e9cf611ab1fa9023352d6ebe69816133e97ee299f92d035fe95c0ef"} Nov 21 15:19:02 crc kubenswrapper[4774]: I1121 15:19:02.520687 4774 generic.go:334] "Generic (PLEG): container finished" podID="9b0f91ea-c98d-40e5-a5df-f808e79f6c33" containerID="5d96e540f34bf754342b6a9fa2bc3dc47e781b8b7cc735bf54aaaa6dcb4ef715" exitCode=0 Nov 21 15:19:02 crc kubenswrapper[4774]: I1121 15:19:02.520752 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kz4kv" event={"ID":"9b0f91ea-c98d-40e5-a5df-f808e79f6c33","Type":"ContainerDied","Data":"5d96e540f34bf754342b6a9fa2bc3dc47e781b8b7cc735bf54aaaa6dcb4ef715"} Nov 21 15:19:03 crc kubenswrapper[4774]: I1121 15:19:03.798487 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:19:03 crc kubenswrapper[4774]: I1121 15:19:03.914350 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8svd\" (UniqueName: \"kubernetes.io/projected/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-kube-api-access-q8svd\") pod \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\" (UID: \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\") " Nov 21 15:19:03 crc kubenswrapper[4774]: I1121 15:19:03.914774 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-crc-storage\") pod \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\" (UID: \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\") " Nov 21 15:19:03 crc kubenswrapper[4774]: I1121 15:19:03.914985 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-node-mnt\") pod \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\" (UID: \"9b0f91ea-c98d-40e5-a5df-f808e79f6c33\") " Nov 21 15:19:03 crc kubenswrapper[4774]: I1121 15:19:03.915153 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "9b0f91ea-c98d-40e5-a5df-f808e79f6c33" (UID: "9b0f91ea-c98d-40e5-a5df-f808e79f6c33"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 15:19:03 crc kubenswrapper[4774]: I1121 15:19:03.915489 4774 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-node-mnt\") on node \"crc\" DevicePath \"\"" Nov 21 15:19:03 crc kubenswrapper[4774]: I1121 15:19:03.919492 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-kube-api-access-q8svd" (OuterVolumeSpecName: "kube-api-access-q8svd") pod "9b0f91ea-c98d-40e5-a5df-f808e79f6c33" (UID: "9b0f91ea-c98d-40e5-a5df-f808e79f6c33"). InnerVolumeSpecName "kube-api-access-q8svd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:19:03 crc kubenswrapper[4774]: I1121 15:19:03.931800 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "9b0f91ea-c98d-40e5-a5df-f808e79f6c33" (UID: "9b0f91ea-c98d-40e5-a5df-f808e79f6c33"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:19:04 crc kubenswrapper[4774]: I1121 15:19:04.016988 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8svd\" (UniqueName: \"kubernetes.io/projected/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-kube-api-access-q8svd\") on node \"crc\" DevicePath \"\"" Nov 21 15:19:04 crc kubenswrapper[4774]: I1121 15:19:04.017031 4774 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9b0f91ea-c98d-40e5-a5df-f808e79f6c33-crc-storage\") on node \"crc\" DevicePath \"\"" Nov 21 15:19:04 crc kubenswrapper[4774]: I1121 15:19:04.541763 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-kz4kv" event={"ID":"9b0f91ea-c98d-40e5-a5df-f808e79f6c33","Type":"ContainerDied","Data":"de5b2eff3e9cf611ab1fa9023352d6ebe69816133e97ee299f92d035fe95c0ef"} Nov 21 15:19:04 crc kubenswrapper[4774]: I1121 15:19:04.541851 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de5b2eff3e9cf611ab1fa9023352d6ebe69816133e97ee299f92d035fe95c0ef" Nov 21 15:19:04 crc kubenswrapper[4774]: I1121 15:19:04.541922 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-kz4kv" Nov 21 15:19:10 crc kubenswrapper[4774]: I1121 15:19:10.102981 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:19:10 crc kubenswrapper[4774]: E1121 15:19:10.104066 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:19:22 crc kubenswrapper[4774]: I1121 15:19:22.092794 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:19:22 crc kubenswrapper[4774]: E1121 15:19:22.093521 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:19:33 crc kubenswrapper[4774]: I1121 15:19:33.093788 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:19:33 crc kubenswrapper[4774]: E1121 15:19:33.094550 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:19:39 crc kubenswrapper[4774]: I1121 15:19:39.522150 4774 scope.go:117] "RemoveContainer" containerID="d3191b2621bdbd561d3965cf16b6dc23c4cf65be6a7481c15b42d2cdb8991b50" Nov 21 15:19:45 crc kubenswrapper[4774]: I1121 
15:19:45.094131 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:19:45 crc kubenswrapper[4774]: E1121 15:19:45.095932 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:19:58 crc kubenswrapper[4774]: I1121 15:19:58.092938 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:19:58 crc kubenswrapper[4774]: E1121 15:19:58.093685 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:20:10 crc kubenswrapper[4774]: I1121 15:20:10.096582 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:20:10 crc kubenswrapper[4774]: E1121 15:20:10.097348 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:20:24 crc kubenswrapper[4774]: I1121 15:20:24.093094 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:20:24 crc kubenswrapper[4774]: E1121 15:20:24.093807 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:20:36 crc kubenswrapper[4774]: I1121 15:20:36.093627 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:20:36 crc kubenswrapper[4774]: E1121 15:20:36.094381 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:20:48 crc kubenswrapper[4774]: I1121 15:20:48.093206 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:20:48 crc kubenswrapper[4774]: E1121 15:20:48.093877 
4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:21:02 crc kubenswrapper[4774]: I1121 15:21:02.093064 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:21:02 crc kubenswrapper[4774]: E1121 15:21:02.093811 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:21:13 crc kubenswrapper[4774]: I1121 15:21:13.092802 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:21:13 crc kubenswrapper[4774]: E1121 15:21:13.093682 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:21:25 crc kubenswrapper[4774]: I1121 15:21:25.092866 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:21:25 crc kubenswrapper[4774]: E1121 15:21:25.093471 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.611980 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tjbgb"] Nov 21 15:21:31 crc kubenswrapper[4774]: E1121 15:21:31.613020 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b0f91ea-c98d-40e5-a5df-f808e79f6c33" containerName="storage" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.613047 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b0f91ea-c98d-40e5-a5df-f808e79f6c33" containerName="storage" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.613591 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b0f91ea-c98d-40e5-a5df-f808e79f6c33" containerName="storage" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.615012 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.621921 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tjbgb"] Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.628423 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sf62\" (UniqueName: \"kubernetes.io/projected/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-kube-api-access-6sf62\") pod \"redhat-operators-tjbgb\" (UID: \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\") " pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.628485 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-utilities\") pod \"redhat-operators-tjbgb\" (UID: \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\") " pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.628599 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-catalog-content\") pod \"redhat-operators-tjbgb\" (UID: \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\") " pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.729939 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sf62\" (UniqueName: \"kubernetes.io/projected/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-kube-api-access-6sf62\") pod \"redhat-operators-tjbgb\" (UID: \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\") " pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.730000 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-utilities\") pod \"redhat-operators-tjbgb\" (UID: \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\") " pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.730059 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-catalog-content\") pod \"redhat-operators-tjbgb\" (UID: \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\") " pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.730540 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-catalog-content\") pod \"redhat-operators-tjbgb\" (UID: \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\") " pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.731034 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-utilities\") pod \"redhat-operators-tjbgb\" (UID: \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\") " pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.748609 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-6sf62\" (UniqueName: \"kubernetes.io/projected/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-kube-api-access-6sf62\") pod \"redhat-operators-tjbgb\" (UID: \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\") " pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:31 crc kubenswrapper[4774]: I1121 15:21:31.985941 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:32 crc kubenswrapper[4774]: I1121 15:21:32.444561 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tjbgb"] Nov 21 15:21:32 crc kubenswrapper[4774]: I1121 15:21:32.627417 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjbgb" event={"ID":"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a","Type":"ContainerStarted","Data":"d7c7761970815cf32fdc457baf0d5dc0b9bc5d2e05938127e059c551e40b869f"} Nov 21 15:21:33 crc kubenswrapper[4774]: I1121 15:21:33.636279 4774 generic.go:334] "Generic (PLEG): container finished" podID="a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" containerID="192e26e95775f0218e725fff34de324cd4ed6c3974c4c4989f6c0b173de518b2" exitCode=0 Nov 21 15:21:33 crc kubenswrapper[4774]: I1121 15:21:33.636339 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjbgb" event={"ID":"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a","Type":"ContainerDied","Data":"192e26e95775f0218e725fff34de324cd4ed6c3974c4c4989f6c0b173de518b2"} Nov 21 15:21:35 crc kubenswrapper[4774]: I1121 15:21:35.654335 4774 generic.go:334] "Generic (PLEG): container finished" podID="a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" containerID="bfffce4f809c405993b2805a3b42a11f522fec5278b7ee739ab1bee64f35221f" exitCode=0 Nov 21 15:21:35 crc kubenswrapper[4774]: I1121 15:21:35.654382 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjbgb" event={"ID":"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a","Type":"ContainerDied","Data":"bfffce4f809c405993b2805a3b42a11f522fec5278b7ee739ab1bee64f35221f"} Nov 21 15:21:36 crc kubenswrapper[4774]: I1121 15:21:36.664532 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjbgb" event={"ID":"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a","Type":"ContainerStarted","Data":"e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1"} Nov 21 15:21:36 crc kubenswrapper[4774]: I1121 15:21:36.684906 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tjbgb" podStartSLOduration=3.17580132 podStartE2EDuration="5.684891531s" podCreationTimestamp="2025-11-21 15:21:31 +0000 UTC" firstStartedPulling="2025-11-21 15:21:33.637962509 +0000 UTC m=+4684.290161768" lastFinishedPulling="2025-11-21 15:21:36.14705272 +0000 UTC m=+4686.799251979" observedRunningTime="2025-11-21 15:21:36.680877536 +0000 UTC m=+4687.333076805" watchObservedRunningTime="2025-11-21 15:21:36.684891531 +0000 UTC m=+4687.337090790" Nov 21 15:21:39 crc kubenswrapper[4774]: I1121 15:21:39.093020 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:21:39 crc kubenswrapper[4774]: E1121 15:21:39.093764 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:21:41 crc kubenswrapper[4774]: I1121 15:21:41.986921 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:41 crc kubenswrapper[4774]: I1121 15:21:41.989075 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:42 crc kubenswrapper[4774]: I1121 15:21:42.035616 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:42 crc kubenswrapper[4774]: I1121 15:21:42.755033 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:42 crc kubenswrapper[4774]: I1121 15:21:42.803218 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tjbgb"] Nov 21 15:21:44 crc kubenswrapper[4774]: I1121 15:21:44.717723 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tjbgb" podUID="a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" containerName="registry-server" containerID="cri-o://e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1" gracePeriod=2 Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.232683 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.420518 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6sf62\" (UniqueName: \"kubernetes.io/projected/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-kube-api-access-6sf62\") pod \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\" (UID: \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\") " Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.420612 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-catalog-content\") pod \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\" (UID: \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\") " Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.420633 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-utilities\") pod \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\" (UID: \"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a\") " Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.421876 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-utilities" (OuterVolumeSpecName: "utilities") pod "a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" (UID: "a35882df-b8e6-42dc-abd3-c3ea61bcbd5a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.428130 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-kube-api-access-6sf62" (OuterVolumeSpecName: "kube-api-access-6sf62") pod "a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" (UID: "a35882df-b8e6-42dc-abd3-c3ea61bcbd5a"). InnerVolumeSpecName "kube-api-access-6sf62". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.522869 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6sf62\" (UniqueName: \"kubernetes.io/projected/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-kube-api-access-6sf62\") on node \"crc\" DevicePath \"\"" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.523193 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.728306 4774 generic.go:334] "Generic (PLEG): container finished" podID="a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" containerID="e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1" exitCode=0 Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.728354 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjbgb" event={"ID":"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a","Type":"ContainerDied","Data":"e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1"} Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.728380 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tjbgb" event={"ID":"a35882df-b8e6-42dc-abd3-c3ea61bcbd5a","Type":"ContainerDied","Data":"d7c7761970815cf32fdc457baf0d5dc0b9bc5d2e05938127e059c551e40b869f"} Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.728398 4774 scope.go:117] "RemoveContainer" containerID="e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.728517 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tjbgb" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.756327 4774 scope.go:117] "RemoveContainer" containerID="bfffce4f809c405993b2805a3b42a11f522fec5278b7ee739ab1bee64f35221f" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.773554 4774 scope.go:117] "RemoveContainer" containerID="192e26e95775f0218e725fff34de324cd4ed6c3974c4c4989f6c0b173de518b2" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.795667 4774 scope.go:117] "RemoveContainer" containerID="e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1" Nov 21 15:21:45 crc kubenswrapper[4774]: E1121 15:21:45.796290 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1\": container with ID starting with e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1 not found: ID does not exist" containerID="e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.796385 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1"} err="failed to get container status \"e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1\": rpc error: code = NotFound desc = could not find container \"e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1\": container with ID starting with e57074f22608fad69eeace2275794af3915dbacf4ffc43bc32bb5a0e233787e1 not found: ID does not exist" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.796476 4774 scope.go:117] "RemoveContainer" containerID="bfffce4f809c405993b2805a3b42a11f522fec5278b7ee739ab1bee64f35221f" Nov 21 15:21:45 crc kubenswrapper[4774]: E1121 15:21:45.797086 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfffce4f809c405993b2805a3b42a11f522fec5278b7ee739ab1bee64f35221f\": container with ID starting with bfffce4f809c405993b2805a3b42a11f522fec5278b7ee739ab1bee64f35221f not found: ID does not exist" containerID="bfffce4f809c405993b2805a3b42a11f522fec5278b7ee739ab1bee64f35221f" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.797163 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfffce4f809c405993b2805a3b42a11f522fec5278b7ee739ab1bee64f35221f"} err="failed to get container status \"bfffce4f809c405993b2805a3b42a11f522fec5278b7ee739ab1bee64f35221f\": rpc error: code = NotFound desc = could not find container \"bfffce4f809c405993b2805a3b42a11f522fec5278b7ee739ab1bee64f35221f\": container with ID starting with bfffce4f809c405993b2805a3b42a11f522fec5278b7ee739ab1bee64f35221f not found: ID does not exist" Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.797227 4774 scope.go:117] "RemoveContainer" containerID="192e26e95775f0218e725fff34de324cd4ed6c3974c4c4989f6c0b173de518b2" Nov 21 15:21:45 crc kubenswrapper[4774]: E1121 15:21:45.797580 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"192e26e95775f0218e725fff34de324cd4ed6c3974c4c4989f6c0b173de518b2\": container with ID starting with 192e26e95775f0218e725fff34de324cd4ed6c3974c4c4989f6c0b173de518b2 not found: ID does not exist" containerID="192e26e95775f0218e725fff34de324cd4ed6c3974c4c4989f6c0b173de518b2" 
Nov 21 15:21:45 crc kubenswrapper[4774]: I1121 15:21:45.797694 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"192e26e95775f0218e725fff34de324cd4ed6c3974c4c4989f6c0b173de518b2"} err="failed to get container status \"192e26e95775f0218e725fff34de324cd4ed6c3974c4c4989f6c0b173de518b2\": rpc error: code = NotFound desc = could not find container \"192e26e95775f0218e725fff34de324cd4ed6c3974c4c4989f6c0b173de518b2\": container with ID starting with 192e26e95775f0218e725fff34de324cd4ed6c3974c4c4989f6c0b173de518b2 not found: ID does not exist" Nov 21 15:21:47 crc kubenswrapper[4774]: I1121 15:21:47.500166 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" (UID: "a35882df-b8e6-42dc-abd3-c3ea61bcbd5a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:21:47 crc kubenswrapper[4774]: I1121 15:21:47.563496 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:21:47 crc kubenswrapper[4774]: I1121 15:21:47.565561 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tjbgb"] Nov 21 15:21:47 crc kubenswrapper[4774]: I1121 15:21:47.571158 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tjbgb"] Nov 21 15:21:48 crc kubenswrapper[4774]: I1121 15:21:48.105430 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" path="/var/lib/kubelet/pods/a35882df-b8e6-42dc-abd3-c3ea61bcbd5a/volumes" Nov 21 15:21:52 crc kubenswrapper[4774]: I1121 15:21:52.093712 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:21:52 crc kubenswrapper[4774]: E1121 15:21:52.095132 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:22:05 crc kubenswrapper[4774]: I1121 15:22:05.093042 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:22:05 crc kubenswrapper[4774]: I1121 15:22:05.889671 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"55e5bb7215c0e7b96cb956f72f36e0242e48205521d6294be282760e5b7b20ab"} Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.685447 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-66d656cccf-vqh4x"] Nov 21 15:22:13 crc kubenswrapper[4774]: E1121 15:22:13.686322 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" containerName="registry-server" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.686338 4774 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" containerName="registry-server" Nov 21 15:22:13 crc kubenswrapper[4774]: E1121 15:22:13.686363 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" containerName="extract-content" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.686371 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" containerName="extract-content" Nov 21 15:22:13 crc kubenswrapper[4774]: E1121 15:22:13.686387 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" containerName="extract-utilities" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.686396 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" containerName="extract-utilities" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.686573 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a35882df-b8e6-42dc-abd3-c3ea61bcbd5a" containerName="registry-server" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.687533 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.691024 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.691026 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.691026 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.691541 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.691753 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-2zwqp" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.698637 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66d656cccf-vqh4x"] Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.827495 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2vqk\" (UniqueName: \"kubernetes.io/projected/019550b7-2420-4105-b39b-103219f766e3-kube-api-access-v2vqk\") pod \"dnsmasq-dns-66d656cccf-vqh4x\" (UID: \"019550b7-2420-4105-b39b-103219f766e3\") " pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.827587 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/019550b7-2420-4105-b39b-103219f766e3-dns-svc\") pod \"dnsmasq-dns-66d656cccf-vqh4x\" (UID: \"019550b7-2420-4105-b39b-103219f766e3\") " pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.827633 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/019550b7-2420-4105-b39b-103219f766e3-config\") pod \"dnsmasq-dns-66d656cccf-vqh4x\" (UID: \"019550b7-2420-4105-b39b-103219f766e3\") " pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.896565 4774 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-777df6d877-mdvmt"] Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.897877 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.922196 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-777df6d877-mdvmt"] Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.929422 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2vqk\" (UniqueName: \"kubernetes.io/projected/019550b7-2420-4105-b39b-103219f766e3-kube-api-access-v2vqk\") pod \"dnsmasq-dns-66d656cccf-vqh4x\" (UID: \"019550b7-2420-4105-b39b-103219f766e3\") " pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.929510 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/019550b7-2420-4105-b39b-103219f766e3-dns-svc\") pod \"dnsmasq-dns-66d656cccf-vqh4x\" (UID: \"019550b7-2420-4105-b39b-103219f766e3\") " pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.929558 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/019550b7-2420-4105-b39b-103219f766e3-config\") pod \"dnsmasq-dns-66d656cccf-vqh4x\" (UID: \"019550b7-2420-4105-b39b-103219f766e3\") " pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.930391 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/019550b7-2420-4105-b39b-103219f766e3-dns-svc\") pod \"dnsmasq-dns-66d656cccf-vqh4x\" (UID: \"019550b7-2420-4105-b39b-103219f766e3\") " pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.930462 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/019550b7-2420-4105-b39b-103219f766e3-config\") pod \"dnsmasq-dns-66d656cccf-vqh4x\" (UID: \"019550b7-2420-4105-b39b-103219f766e3\") " pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:13 crc kubenswrapper[4774]: I1121 15:22:13.975792 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2vqk\" (UniqueName: \"kubernetes.io/projected/019550b7-2420-4105-b39b-103219f766e3-kube-api-access-v2vqk\") pod \"dnsmasq-dns-66d656cccf-vqh4x\" (UID: \"019550b7-2420-4105-b39b-103219f766e3\") " pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.016352 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.032800 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czv8z\" (UniqueName: \"kubernetes.io/projected/cd686dda-40cb-412e-96b2-2a40efe1b4f7-kube-api-access-czv8z\") pod \"dnsmasq-dns-777df6d877-mdvmt\" (UID: \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\") " pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.032935 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd686dda-40cb-412e-96b2-2a40efe1b4f7-dns-svc\") pod \"dnsmasq-dns-777df6d877-mdvmt\" (UID: \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\") " pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.033004 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd686dda-40cb-412e-96b2-2a40efe1b4f7-config\") pod \"dnsmasq-dns-777df6d877-mdvmt\" (UID: \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\") " pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.136711 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czv8z\" (UniqueName: \"kubernetes.io/projected/cd686dda-40cb-412e-96b2-2a40efe1b4f7-kube-api-access-czv8z\") pod \"dnsmasq-dns-777df6d877-mdvmt\" (UID: \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\") " pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.136756 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd686dda-40cb-412e-96b2-2a40efe1b4f7-dns-svc\") pod \"dnsmasq-dns-777df6d877-mdvmt\" (UID: \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\") " pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.136805 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd686dda-40cb-412e-96b2-2a40efe1b4f7-config\") pod \"dnsmasq-dns-777df6d877-mdvmt\" (UID: \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\") " pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.138066 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd686dda-40cb-412e-96b2-2a40efe1b4f7-dns-svc\") pod \"dnsmasq-dns-777df6d877-mdvmt\" (UID: \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\") " pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.138425 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd686dda-40cb-412e-96b2-2a40efe1b4f7-config\") pod \"dnsmasq-dns-777df6d877-mdvmt\" (UID: \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\") " pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.183786 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czv8z\" (UniqueName: \"kubernetes.io/projected/cd686dda-40cb-412e-96b2-2a40efe1b4f7-kube-api-access-czv8z\") pod \"dnsmasq-dns-777df6d877-mdvmt\" (UID: \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\") " 
pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.213319 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.533000 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-66d656cccf-vqh4x"] Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.641361 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-777df6d877-mdvmt"] Nov 21 15:22:14 crc kubenswrapper[4774]: W1121 15:22:14.646318 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd686dda_40cb_412e_96b2_2a40efe1b4f7.slice/crio-8a5bee76da33b7475eeaed1cf3235fb904ff738f51c5e558545b15b2d357f3a8 WatchSource:0}: Error finding container 8a5bee76da33b7475eeaed1cf3235fb904ff738f51c5e558545b15b2d357f3a8: Status 404 returned error can't find the container with id 8a5bee76da33b7475eeaed1cf3235fb904ff738f51c5e558545b15b2d357f3a8 Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.803903 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.805174 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.808726 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.809009 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.809029 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-869bx" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.809021 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.814062 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.824433 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.948776 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wtd6\" (UniqueName: \"kubernetes.io/projected/8532c113-de51-49d6-9cd2-9e161b8e844f-kube-api-access-6wtd6\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.948849 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8532c113-de51-49d6-9cd2-9e161b8e844f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.948926 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: 
\"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.948968 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8532c113-de51-49d6-9cd2-9e161b8e844f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.949012 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-55fa9486-c50a-4349-8465-17b561ece9df\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.949042 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.949071 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8532c113-de51-49d6-9cd2-9e161b8e844f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.949091 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.949129 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8532c113-de51-49d6-9cd2-9e161b8e844f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.955217 4774 generic.go:334] "Generic (PLEG): container finished" podID="019550b7-2420-4105-b39b-103219f766e3" containerID="a0d44dad6af2877b935ec0c9268f5a862495caf7e7b88a7e62682ab9d4cfdb37" exitCode=0 Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.955260 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" event={"ID":"019550b7-2420-4105-b39b-103219f766e3","Type":"ContainerDied","Data":"a0d44dad6af2877b935ec0c9268f5a862495caf7e7b88a7e62682ab9d4cfdb37"} Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.955309 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" event={"ID":"019550b7-2420-4105-b39b-103219f766e3","Type":"ContainerStarted","Data":"ae1bffc6e3fa2cfc7fb96ffedba0497bd2ac20e77b88661d2c7c705df13c6b26"} Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.957803 4774 generic.go:334] "Generic (PLEG): container finished" podID="cd686dda-40cb-412e-96b2-2a40efe1b4f7" 
containerID="c57fe689546befa43af6f4b9155c668ca02db9a8a87e66fb45e4323d4d419ad2" exitCode=0 Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.957847 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-777df6d877-mdvmt" event={"ID":"cd686dda-40cb-412e-96b2-2a40efe1b4f7","Type":"ContainerDied","Data":"c57fe689546befa43af6f4b9155c668ca02db9a8a87e66fb45e4323d4d419ad2"} Nov 21 15:22:14 crc kubenswrapper[4774]: I1121 15:22:14.957868 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-777df6d877-mdvmt" event={"ID":"cd686dda-40cb-412e-96b2-2a40efe1b4f7","Type":"ContainerStarted","Data":"8a5bee76da33b7475eeaed1cf3235fb904ff738f51c5e558545b15b2d357f3a8"} Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.054162 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8532c113-de51-49d6-9cd2-9e161b8e844f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.054585 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-55fa9486-c50a-4349-8465-17b561ece9df\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.054633 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.054671 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.054695 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8532c113-de51-49d6-9cd2-9e161b8e844f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.054760 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8532c113-de51-49d6-9cd2-9e161b8e844f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.054950 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wtd6\" (UniqueName: \"kubernetes.io/projected/8532c113-de51-49d6-9cd2-9e161b8e844f-kube-api-access-6wtd6\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.055018 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/8532c113-de51-49d6-9cd2-9e161b8e844f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.055065 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.055384 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8532c113-de51-49d6-9cd2-9e161b8e844f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.057300 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.058437 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.058458 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-55fa9486-c50a-4349-8465-17b561ece9df\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e6b4630e21b2bb3e6513d4f380d3716c3b1abd0736fd9075181f25a5637de358/globalmount\"" pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.058500 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.059743 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8532c113-de51-49d6-9cd2-9e161b8e844f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.064252 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8532c113-de51-49d6-9cd2-9e161b8e844f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.067710 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.077913 
4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8532c113-de51-49d6-9cd2-9e161b8e844f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.079406 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.080852 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.085496 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.085766 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wtd6\" (UniqueName: \"kubernetes.io/projected/8532c113-de51-49d6-9cd2-9e161b8e844f-kube-api-access-6wtd6\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.085777 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-44k7v" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.085838 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.086050 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.090542 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.101399 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.105105 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-55fa9486-c50a-4349-8465-17b561ece9df\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df\") pod \"rabbitmq-server-0\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.133325 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.157090 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.157174 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.157211 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.157305 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.157350 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.157387 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-247zl\" (UniqueName: \"kubernetes.io/projected/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-kube-api-access-247zl\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.157441 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.157542 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.157579 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.258987 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.259055 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.259083 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.259144 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.259174 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.259197 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.259234 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.259260 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.259286 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-247zl\" (UniqueName: \"kubernetes.io/projected/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-kube-api-access-247zl\") 
pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.260344 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.261386 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.263666 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.263752 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.263763 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.263992 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.282016 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.282073 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c37317031bf95367ef9f439d6aac52a5ab67351d92ce77de835f97768d8da840/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.283192 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.297626 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-247zl\" (UniqueName: \"kubernetes.io/projected/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-kube-api-access-247zl\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.365801 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\") pod \"rabbitmq-cell1-server-0\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.511219 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 21 15:22:15 crc kubenswrapper[4774]: W1121 15:22:15.522405 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8532c113_de51_49d6_9cd2_9e161b8e844f.slice/crio-aeb4ab6f04d1fe363a6f4c8a3bc505cd700e0e32c37ce917217f5f3aba30ba61 WatchSource:0}: Error finding container aeb4ab6f04d1fe363a6f4c8a3bc505cd700e0e32c37ce917217f5f3aba30ba61: Status 404 returned error can't find the container with id aeb4ab6f04d1fe363a6f4c8a3bc505cd700e0e32c37ce917217f5f3aba30ba61
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.598980 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.808528 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.965641 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d","Type":"ContainerStarted","Data":"aa9c0c6cab04657956f143a584a94088b77d9c163fdeb441b70452b76e778fb4"}
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.968189 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" event={"ID":"019550b7-2420-4105-b39b-103219f766e3","Type":"ContainerStarted","Data":"92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc"}
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.968379 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x"
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.969724 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-777df6d877-mdvmt" event={"ID":"cd686dda-40cb-412e-96b2-2a40efe1b4f7","Type":"ContainerStarted","Data":"3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446"}
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.969941 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-777df6d877-mdvmt"
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.970491 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8532c113-de51-49d6-9cd2-9e161b8e844f","Type":"ContainerStarted","Data":"aeb4ab6f04d1fe363a6f4c8a3bc505cd700e0e32c37ce917217f5f3aba30ba61"}
Nov 21 15:22:15 crc kubenswrapper[4774]: I1121 15:22:15.986205 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" podStartSLOduration=2.98618893 podStartE2EDuration="2.98618893s" podCreationTimestamp="2025-11-21 15:22:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:22:15.983773351 +0000 UTC m=+4726.635972610" watchObservedRunningTime="2025-11-21 15:22:15.98618893 +0000 UTC m=+4726.638388189"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.006361 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-777df6d877-mdvmt" podStartSLOduration=3.006339656 podStartE2EDuration="3.006339656s" podCreationTimestamp="2025-11-21 15:22:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:22:16.000157649 +0000 UTC m=+4726.652356918" watchObservedRunningTime="2025-11-21 15:22:16.006339656 +0000 UTC m=+4726.658538925"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.386053 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.387577 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.390170 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-6p7hf"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.390437 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.390657 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.390800 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.401209 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.403784 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.476288 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-config-data-default\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.476330 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.476361 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-7a5bfe8f-2e12-45e4-8169-48f988fea8fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a5bfe8f-2e12-45e4-8169-48f988fea8fd\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.476379 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.476397 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj44v\" (UniqueName: \"kubernetes.io/projected/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-kube-api-access-dj44v\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.476460 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.476491 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.476530 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-kolla-config\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.578630 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-kolla-config\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.578754 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-config-data-default\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.579853 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-config-data-default\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.582660 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-kolla-config\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.582710 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.582852 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-7a5bfe8f-2e12-45e4-8169-48f988fea8fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a5bfe8f-2e12-45e4-8169-48f988fea8fd\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.582896 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.582942 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj44v\" (UniqueName: \"kubernetes.io/projected/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-kube-api-access-dj44v\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.583063 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.583185 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.583379 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.584431 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.588683 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.589708 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.589749 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-7a5bfe8f-2e12-45e4-8169-48f988fea8fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a5bfe8f-2e12-45e4-8169-48f988fea8fd\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/092ed77a7c544f6e6143e8c5fa5a37aa20f1ba7ac7f8ffe71644b4ed0b3aa139/globalmount\"" pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.592090 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.706675 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj44v\" (UniqueName: \"kubernetes.io/projected/14d4cc6c-e087-4c4c-8d38-2c0dce0d210c-kube-api-access-dj44v\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.783346 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.784566 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.787879 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-vng9g"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.788190 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.795552 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.887361 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/730f0937-f89d-410c-879c-5f561cffc548-kolla-config\") pod \"memcached-0\" (UID: \"730f0937-f89d-410c-879c-5f561cffc548\") " pod="openstack/memcached-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.887403 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk8sd\" (UniqueName: \"kubernetes.io/projected/730f0937-f89d-410c-879c-5f561cffc548-kube-api-access-jk8sd\") pod \"memcached-0\" (UID: \"730f0937-f89d-410c-879c-5f561cffc548\") " pod="openstack/memcached-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.887487 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/730f0937-f89d-410c-879c-5f561cffc548-config-data\") pod \"memcached-0\" (UID: \"730f0937-f89d-410c-879c-5f561cffc548\") " pod="openstack/memcached-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.916991 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-7a5bfe8f-2e12-45e4-8169-48f988fea8fd\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-7a5bfe8f-2e12-45e4-8169-48f988fea8fd\") pod \"openstack-galera-0\" (UID: \"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c\") " pod="openstack/openstack-galera-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.979302 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8532c113-de51-49d6-9cd2-9e161b8e844f","Type":"ContainerStarted","Data":"b247e4017a246f9b9f8698ac0059b194728d910484bd0f40acc276ae51f928b1"}
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.988584 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/730f0937-f89d-410c-879c-5f561cffc548-kolla-config\") pod \"memcached-0\" (UID: \"730f0937-f89d-410c-879c-5f561cffc548\") " pod="openstack/memcached-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.988880 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk8sd\" (UniqueName: \"kubernetes.io/projected/730f0937-f89d-410c-879c-5f561cffc548-kube-api-access-jk8sd\") pod \"memcached-0\" (UID: \"730f0937-f89d-410c-879c-5f561cffc548\") " pod="openstack/memcached-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.989120 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/730f0937-f89d-410c-879c-5f561cffc548-config-data\") pod \"memcached-0\" (UID: \"730f0937-f89d-410c-879c-5f561cffc548\") " pod="openstack/memcached-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.989456 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/730f0937-f89d-410c-879c-5f561cffc548-kolla-config\") pod \"memcached-0\" (UID: \"730f0937-f89d-410c-879c-5f561cffc548\") " pod="openstack/memcached-0"
Nov 21 15:22:16 crc kubenswrapper[4774]: I1121 15:22:16.989811 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/730f0937-f89d-410c-879c-5f561cffc548-config-data\") pod \"memcached-0\" (UID: \"730f0937-f89d-410c-879c-5f561cffc548\") " pod="openstack/memcached-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.005374 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.007813 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk8sd\" (UniqueName: \"kubernetes.io/projected/730f0937-f89d-410c-879c-5f561cffc548-kube-api-access-jk8sd\") pod \"memcached-0\" (UID: \"730f0937-f89d-410c-879c-5f561cffc548\") " pod="openstack/memcached-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.115240 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.273075 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.592660 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Nov 21 15:22:17 crc kubenswrapper[4774]: W1121 15:22:17.602444 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod730f0937_f89d_410c_879c_5f561cffc548.slice/crio-d0dd847a0bfefcdfd50e0edbaa7ce13c4d3841d47de1a459a910e3986c7464e4 WatchSource:0}: Error finding container d0dd847a0bfefcdfd50e0edbaa7ce13c4d3841d47de1a459a910e3986c7464e4: Status 404 returned error can't find the container with id d0dd847a0bfefcdfd50e0edbaa7ce13c4d3841d47de1a459a910e3986c7464e4
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.860020 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.861218 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.863660 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-gs767"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.863892 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.864071 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.864215 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.879761 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.905463 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b63eba9-23bf-47d2-8568-370d10b96150-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.905519 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9e9210e9-f329-45a8-ab32-0e11068292c4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9e9210e9-f329-45a8-ab32-0e11068292c4\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.905612 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b63eba9-23bf-47d2-8568-370d10b96150-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.905663 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6b63eba9-23bf-47d2-8568-370d10b96150-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.905739 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6b63eba9-23bf-47d2-8568-370d10b96150-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.905770 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtpnb\" (UniqueName: \"kubernetes.io/projected/6b63eba9-23bf-47d2-8568-370d10b96150-kube-api-access-dtpnb\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.905934 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6b63eba9-23bf-47d2-8568-370d10b96150-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.906007 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b63eba9-23bf-47d2-8568-370d10b96150-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0"
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.987562 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"730f0937-f89d-410c-879c-5f561cffc548","Type":"ContainerStarted","Data":"ffc3bc1d9a344e2cc77e637b952fd1a660c95fa9ae6b79dd5f83174a2bfd0be1"}
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.987607 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"730f0937-f89d-410c-879c-5f561cffc548","Type":"ContainerStarted","Data":"d0dd847a0bfefcdfd50e0edbaa7ce13c4d3841d47de1a459a910e3986c7464e4"}
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.989162 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c","Type":"ContainerStarted","Data":"95b649bc4db5f8c472ecd90e2c3467de02ac49bd6834e3ce74d8024106452c73"}
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.989190 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c","Type":"ContainerStarted","Data":"0b116183155462094279f8d3dff6921335fbeb21018a222dbd931fb3ab2cf0f8"}
Nov 21 15:22:17 crc kubenswrapper[4774]: I1121 15:22:17.991083 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d","Type":"ContainerStarted","Data":"b59c816f121b463103778cf735ebb4b940efcb0056e97633491fc46ba4fe54a5"}
Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.021543 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b63eba9-23bf-47d2-8568-370d10b96150-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0"
\"kubernetes.io/secret/6b63eba9-23bf-47d2-8568-370d10b96150-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.021648 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b63eba9-23bf-47d2-8568-370d10b96150-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.021685 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9e9210e9-f329-45a8-ab32-0e11068292c4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9e9210e9-f329-45a8-ab32-0e11068292c4\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.021716 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6b63eba9-23bf-47d2-8568-370d10b96150-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.021742 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6b63eba9-23bf-47d2-8568-370d10b96150-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.021777 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6b63eba9-23bf-47d2-8568-370d10b96150-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.021797 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtpnb\" (UniqueName: \"kubernetes.io/projected/6b63eba9-23bf-47d2-8568-370d10b96150-kube-api-access-dtpnb\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.021840 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6b63eba9-23bf-47d2-8568-370d10b96150-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.022474 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6b63eba9-23bf-47d2-8568-370d10b96150-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.024599 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/6b63eba9-23bf-47d2-8568-370d10b96150-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.028844 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6b63eba9-23bf-47d2-8568-370d10b96150-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.030600 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6b63eba9-23bf-47d2-8568-370d10b96150-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.034893 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.034941 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9e9210e9-f329-45a8-ab32-0e11068292c4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9e9210e9-f329-45a8-ab32-0e11068292c4\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/04a797c73d3120eebf15e0515f4f9ad27f5d3f6f0b7ded5d7d94f048c602b968/globalmount\"" pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.047597 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b63eba9-23bf-47d2-8568-370d10b96150-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.047763 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b63eba9-23bf-47d2-8568-370d10b96150-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.060476 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtpnb\" (UniqueName: \"kubernetes.io/projected/6b63eba9-23bf-47d2-8568-370d10b96150-kube-api-access-dtpnb\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.095836 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9e9210e9-f329-45a8-ab32-0e11068292c4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9e9210e9-f329-45a8-ab32-0e11068292c4\") pod \"openstack-cell1-galera-0\" (UID: \"6b63eba9-23bf-47d2-8568-370d10b96150\") " pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.177799 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.714131 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.999680 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6b63eba9-23bf-47d2-8568-370d10b96150","Type":"ContainerStarted","Data":"67fc2110d85449cb6cf7a46eefd7302ab83d64579a7f6a02f61c37deb818fded"} Nov 21 15:22:18 crc kubenswrapper[4774]: I1121 15:22:18.999747 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6b63eba9-23bf-47d2-8568-370d10b96150","Type":"ContainerStarted","Data":"75a19b101a21f861cdb1ef0631abe10b1c36d33e5a9f63f2b640daf0e5b3793c"} Nov 21 15:22:19 crc kubenswrapper[4774]: I1121 15:22:19.022245 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=3.022228831 podStartE2EDuration="3.022228831s" podCreationTimestamp="2025-11-21 15:22:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:22:19.016618041 +0000 UTC m=+4729.668817310" watchObservedRunningTime="2025-11-21 15:22:19.022228831 +0000 UTC m=+4729.674428090" Nov 21 15:22:22 crc kubenswrapper[4774]: I1121 15:22:22.116194 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Nov 21 15:22:22 crc kubenswrapper[4774]: I1121 15:22:22.117249 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.019018 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.216060 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.307996 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66d656cccf-vqh4x"] Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.308215 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" podUID="019550b7-2420-4105-b39b-103219f766e3" containerName="dnsmasq-dns" containerID="cri-o://92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc" gracePeriod=10 Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.734999 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.829517 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2vqk\" (UniqueName: \"kubernetes.io/projected/019550b7-2420-4105-b39b-103219f766e3-kube-api-access-v2vqk\") pod \"019550b7-2420-4105-b39b-103219f766e3\" (UID: \"019550b7-2420-4105-b39b-103219f766e3\") " Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.829562 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/019550b7-2420-4105-b39b-103219f766e3-config\") pod \"019550b7-2420-4105-b39b-103219f766e3\" (UID: \"019550b7-2420-4105-b39b-103219f766e3\") " Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.829780 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/019550b7-2420-4105-b39b-103219f766e3-dns-svc\") pod \"019550b7-2420-4105-b39b-103219f766e3\" (UID: \"019550b7-2420-4105-b39b-103219f766e3\") " Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.841802 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/019550b7-2420-4105-b39b-103219f766e3-kube-api-access-v2vqk" (OuterVolumeSpecName: "kube-api-access-v2vqk") pod "019550b7-2420-4105-b39b-103219f766e3" (UID: "019550b7-2420-4105-b39b-103219f766e3"). InnerVolumeSpecName "kube-api-access-v2vqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.866289 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/019550b7-2420-4105-b39b-103219f766e3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "019550b7-2420-4105-b39b-103219f766e3" (UID: "019550b7-2420-4105-b39b-103219f766e3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.871540 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/019550b7-2420-4105-b39b-103219f766e3-config" (OuterVolumeSpecName: "config") pod "019550b7-2420-4105-b39b-103219f766e3" (UID: "019550b7-2420-4105-b39b-103219f766e3"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.931947 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/019550b7-2420-4105-b39b-103219f766e3-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.931979 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2vqk\" (UniqueName: \"kubernetes.io/projected/019550b7-2420-4105-b39b-103219f766e3-kube-api-access-v2vqk\") on node \"crc\" DevicePath \"\"" Nov 21 15:22:24 crc kubenswrapper[4774]: I1121 15:22:24.931990 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/019550b7-2420-4105-b39b-103219f766e3-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.054696 4774 generic.go:334] "Generic (PLEG): container finished" podID="14d4cc6c-e087-4c4c-8d38-2c0dce0d210c" containerID="95b649bc4db5f8c472ecd90e2c3467de02ac49bd6834e3ce74d8024106452c73" exitCode=0 Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.054776 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c","Type":"ContainerDied","Data":"95b649bc4db5f8c472ecd90e2c3467de02ac49bd6834e3ce74d8024106452c73"} Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.057403 4774 generic.go:334] "Generic (PLEG): container finished" podID="6b63eba9-23bf-47d2-8568-370d10b96150" containerID="67fc2110d85449cb6cf7a46eefd7302ab83d64579a7f6a02f61c37deb818fded" exitCode=0 Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.057514 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6b63eba9-23bf-47d2-8568-370d10b96150","Type":"ContainerDied","Data":"67fc2110d85449cb6cf7a46eefd7302ab83d64579a7f6a02f61c37deb818fded"} Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.062167 4774 generic.go:334] "Generic (PLEG): container finished" podID="019550b7-2420-4105-b39b-103219f766e3" containerID="92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc" exitCode=0 Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.062224 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.062228 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" event={"ID":"019550b7-2420-4105-b39b-103219f766e3","Type":"ContainerDied","Data":"92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc"} Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.062447 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-66d656cccf-vqh4x" event={"ID":"019550b7-2420-4105-b39b-103219f766e3","Type":"ContainerDied","Data":"ae1bffc6e3fa2cfc7fb96ffedba0497bd2ac20e77b88661d2c7c705df13c6b26"} Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.062692 4774 scope.go:117] "RemoveContainer" containerID="92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc" Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.094041 4774 scope.go:117] "RemoveContainer" containerID="a0d44dad6af2877b935ec0c9268f5a862495caf7e7b88a7e62682ab9d4cfdb37" Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.130098 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-66d656cccf-vqh4x"] Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.138212 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-66d656cccf-vqh4x"] Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.141993 4774 scope.go:117] "RemoveContainer" containerID="92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc" Nov 21 15:22:25 crc kubenswrapper[4774]: E1121 15:22:25.143422 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc\": container with ID starting with 92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc not found: ID does not exist" containerID="92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc" Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.143469 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc"} err="failed to get container status \"92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc\": rpc error: code = NotFound desc = could not find container \"92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc\": container with ID starting with 92fc8f41fdd48ee755fcd8e70cf4d5745f8c42dbae0f0651e37d2ef8ae8742fc not found: ID does not exist" Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.143501 4774 scope.go:117] "RemoveContainer" containerID="a0d44dad6af2877b935ec0c9268f5a862495caf7e7b88a7e62682ab9d4cfdb37" Nov 21 15:22:25 crc kubenswrapper[4774]: E1121 15:22:25.143950 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0d44dad6af2877b935ec0c9268f5a862495caf7e7b88a7e62682ab9d4cfdb37\": container with ID starting with a0d44dad6af2877b935ec0c9268f5a862495caf7e7b88a7e62682ab9d4cfdb37 not found: ID does not exist" containerID="a0d44dad6af2877b935ec0c9268f5a862495caf7e7b88a7e62682ab9d4cfdb37" Nov 21 15:22:25 crc kubenswrapper[4774]: I1121 15:22:25.143990 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0d44dad6af2877b935ec0c9268f5a862495caf7e7b88a7e62682ab9d4cfdb37"} err="failed to get container status 
\"a0d44dad6af2877b935ec0c9268f5a862495caf7e7b88a7e62682ab9d4cfdb37\": rpc error: code = NotFound desc = could not find container \"a0d44dad6af2877b935ec0c9268f5a862495caf7e7b88a7e62682ab9d4cfdb37\": container with ID starting with a0d44dad6af2877b935ec0c9268f5a862495caf7e7b88a7e62682ab9d4cfdb37 not found: ID does not exist" Nov 21 15:22:26 crc kubenswrapper[4774]: I1121 15:22:26.073800 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"14d4cc6c-e087-4c4c-8d38-2c0dce0d210c","Type":"ContainerStarted","Data":"63f1b04f56888cffa5742cb3a0e211ddd7a6870b8fed47236b9cf41d02f86946"} Nov 21 15:22:26 crc kubenswrapper[4774]: I1121 15:22:26.075122 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"6b63eba9-23bf-47d2-8568-370d10b96150","Type":"ContainerStarted","Data":"71ad81bd49d5de6463c287bb2e79650a1bfe8f4c65ffa6b607990ed4c36d08d5"} Nov 21 15:22:26 crc kubenswrapper[4774]: I1121 15:22:26.110243 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=11.110218078 podStartE2EDuration="11.110218078s" podCreationTimestamp="2025-11-21 15:22:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:22:26.10258131 +0000 UTC m=+4736.754780589" watchObservedRunningTime="2025-11-21 15:22:26.110218078 +0000 UTC m=+4736.762417347" Nov 21 15:22:26 crc kubenswrapper[4774]: I1121 15:22:26.133644 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="019550b7-2420-4105-b39b-103219f766e3" path="/var/lib/kubelet/pods/019550b7-2420-4105-b39b-103219f766e3/volumes" Nov 21 15:22:26 crc kubenswrapper[4774]: I1121 15:22:26.147068 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=10.147045629 podStartE2EDuration="10.147045629s" podCreationTimestamp="2025-11-21 15:22:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:22:26.136786526 +0000 UTC m=+4736.788985795" watchObservedRunningTime="2025-11-21 15:22:26.147045629 +0000 UTC m=+4736.799244888" Nov 21 15:22:27 crc kubenswrapper[4774]: I1121 15:22:27.005650 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Nov 21 15:22:27 crc kubenswrapper[4774]: I1121 15:22:27.005714 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Nov 21 15:22:28 crc kubenswrapper[4774]: I1121 15:22:28.177990 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:28 crc kubenswrapper[4774]: I1121 15:22:28.178403 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:31 crc kubenswrapper[4774]: I1121 15:22:31.080083 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Nov 21 15:22:31 crc kubenswrapper[4774]: I1121 15:22:31.147142 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Nov 21 15:22:32 crc kubenswrapper[4774]: I1121 15:22:32.258710 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:32 
crc kubenswrapper[4774]: I1121 15:22:32.326787 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Nov 21 15:22:49 crc kubenswrapper[4774]: I1121 15:22:49.288497 4774 generic.go:334] "Generic (PLEG): container finished" podID="e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" containerID="b59c816f121b463103778cf735ebb4b940efcb0056e97633491fc46ba4fe54a5" exitCode=0 Nov 21 15:22:49 crc kubenswrapper[4774]: I1121 15:22:49.288561 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d","Type":"ContainerDied","Data":"b59c816f121b463103778cf735ebb4b940efcb0056e97633491fc46ba4fe54a5"} Nov 21 15:22:49 crc kubenswrapper[4774]: I1121 15:22:49.292772 4774 generic.go:334] "Generic (PLEG): container finished" podID="8532c113-de51-49d6-9cd2-9e161b8e844f" containerID="b247e4017a246f9b9f8698ac0059b194728d910484bd0f40acc276ae51f928b1" exitCode=0 Nov 21 15:22:49 crc kubenswrapper[4774]: I1121 15:22:49.292813 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8532c113-de51-49d6-9cd2-9e161b8e844f","Type":"ContainerDied","Data":"b247e4017a246f9b9f8698ac0059b194728d910484bd0f40acc276ae51f928b1"} Nov 21 15:22:50 crc kubenswrapper[4774]: I1121 15:22:50.301150 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8532c113-de51-49d6-9cd2-9e161b8e844f","Type":"ContainerStarted","Data":"0f92aa3ac48436cbb3cae14a2f5c8d4abe2887be6fc8596a0bc8f24ed26a55f0"} Nov 21 15:22:50 crc kubenswrapper[4774]: I1121 15:22:50.301718 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Nov 21 15:22:50 crc kubenswrapper[4774]: I1121 15:22:50.303440 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d","Type":"ContainerStarted","Data":"896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3"} Nov 21 15:22:50 crc kubenswrapper[4774]: I1121 15:22:50.303630 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:22:50 crc kubenswrapper[4774]: I1121 15:22:50.335042 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.335017685 podStartE2EDuration="37.335017685s" podCreationTimestamp="2025-11-21 15:22:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:22:50.327045348 +0000 UTC m=+4760.979244597" watchObservedRunningTime="2025-11-21 15:22:50.335017685 +0000 UTC m=+4760.987216954" Nov 21 15:22:50 crc kubenswrapper[4774]: I1121 15:22:50.353422 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.35340339 podStartE2EDuration="36.35340339s" podCreationTimestamp="2025-11-21 15:22:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:22:50.347980705 +0000 UTC m=+4761.000179984" watchObservedRunningTime="2025-11-21 15:22:50.35340339 +0000 UTC m=+4761.005602649" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.007381 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d9c6c"] Nov 21 15:22:59 crc 
kubenswrapper[4774]: E1121 15:22:59.008504 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="019550b7-2420-4105-b39b-103219f766e3" containerName="dnsmasq-dns" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.008523 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="019550b7-2420-4105-b39b-103219f766e3" containerName="dnsmasq-dns" Nov 21 15:22:59 crc kubenswrapper[4774]: E1121 15:22:59.008550 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="019550b7-2420-4105-b39b-103219f766e3" containerName="init" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.008558 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="019550b7-2420-4105-b39b-103219f766e3" containerName="init" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.008765 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="019550b7-2420-4105-b39b-103219f766e3" containerName="dnsmasq-dns" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.011158 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d9c6c" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.028050 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d9c6c"] Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.063694 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dckcz\" (UniqueName: \"kubernetes.io/projected/61ef2093-e396-46a6-94a8-69e7d40de50b-kube-api-access-dckcz\") pod \"community-operators-d9c6c\" (UID: \"61ef2093-e396-46a6-94a8-69e7d40de50b\") " pod="openshift-marketplace/community-operators-d9c6c" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.063781 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61ef2093-e396-46a6-94a8-69e7d40de50b-catalog-content\") pod \"community-operators-d9c6c\" (UID: \"61ef2093-e396-46a6-94a8-69e7d40de50b\") " pod="openshift-marketplace/community-operators-d9c6c" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.063976 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61ef2093-e396-46a6-94a8-69e7d40de50b-utilities\") pod \"community-operators-d9c6c\" (UID: \"61ef2093-e396-46a6-94a8-69e7d40de50b\") " pod="openshift-marketplace/community-operators-d9c6c" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.166041 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61ef2093-e396-46a6-94a8-69e7d40de50b-utilities\") pod \"community-operators-d9c6c\" (UID: \"61ef2093-e396-46a6-94a8-69e7d40de50b\") " pod="openshift-marketplace/community-operators-d9c6c" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.166238 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dckcz\" (UniqueName: \"kubernetes.io/projected/61ef2093-e396-46a6-94a8-69e7d40de50b-kube-api-access-dckcz\") pod \"community-operators-d9c6c\" (UID: \"61ef2093-e396-46a6-94a8-69e7d40de50b\") " pod="openshift-marketplace/community-operators-d9c6c" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.166290 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/61ef2093-e396-46a6-94a8-69e7d40de50b-catalog-content\") pod \"community-operators-d9c6c\" (UID: \"61ef2093-e396-46a6-94a8-69e7d40de50b\") " pod="openshift-marketplace/community-operators-d9c6c" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.166690 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61ef2093-e396-46a6-94a8-69e7d40de50b-utilities\") pod \"community-operators-d9c6c\" (UID: \"61ef2093-e396-46a6-94a8-69e7d40de50b\") " pod="openshift-marketplace/community-operators-d9c6c" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.167236 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61ef2093-e396-46a6-94a8-69e7d40de50b-catalog-content\") pod \"community-operators-d9c6c\" (UID: \"61ef2093-e396-46a6-94a8-69e7d40de50b\") " pod="openshift-marketplace/community-operators-d9c6c" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.189124 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dckcz\" (UniqueName: \"kubernetes.io/projected/61ef2093-e396-46a6-94a8-69e7d40de50b-kube-api-access-dckcz\") pod \"community-operators-d9c6c\" (UID: \"61ef2093-e396-46a6-94a8-69e7d40de50b\") " pod="openshift-marketplace/community-operators-d9c6c" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.335276 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d9c6c" Nov 21 15:22:59 crc kubenswrapper[4774]: I1121 15:22:59.835779 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d9c6c"] Nov 21 15:23:00 crc kubenswrapper[4774]: I1121 15:23:00.395008 4774 generic.go:334] "Generic (PLEG): container finished" podID="61ef2093-e396-46a6-94a8-69e7d40de50b" containerID="b86fb325aa63249bd56d8574f489b4d38e72c7e52fce01f69e9d0e0b3426fa53" exitCode=0 Nov 21 15:23:00 crc kubenswrapper[4774]: I1121 15:23:00.395110 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d9c6c" event={"ID":"61ef2093-e396-46a6-94a8-69e7d40de50b","Type":"ContainerDied","Data":"b86fb325aa63249bd56d8574f489b4d38e72c7e52fce01f69e9d0e0b3426fa53"} Nov 21 15:23:00 crc kubenswrapper[4774]: I1121 15:23:00.395172 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d9c6c" event={"ID":"61ef2093-e396-46a6-94a8-69e7d40de50b","Type":"ContainerStarted","Data":"5ca9dbf5d944c95b103f4300082d37b759a70440d06e1480cc007e6ecd5a05b4"} Nov 21 15:23:02 crc kubenswrapper[4774]: I1121 15:23:02.411632 4774 generic.go:334] "Generic (PLEG): container finished" podID="61ef2093-e396-46a6-94a8-69e7d40de50b" containerID="87568f568f218b82291f766afcc900e5edd68b3c1cd972dbe9a814860ef11a9b" exitCode=0 Nov 21 15:23:02 crc kubenswrapper[4774]: I1121 15:23:02.411728 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d9c6c" event={"ID":"61ef2093-e396-46a6-94a8-69e7d40de50b","Type":"ContainerDied","Data":"87568f568f218b82291f766afcc900e5edd68b3c1cd972dbe9a814860ef11a9b"} Nov 21 15:23:04 crc kubenswrapper[4774]: I1121 15:23:04.852876 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d9c6c" event={"ID":"61ef2093-e396-46a6-94a8-69e7d40de50b","Type":"ContainerStarted","Data":"a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a"} 
Nov 21 15:23:04 crc kubenswrapper[4774]: I1121 15:23:04.871906 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d9c6c" podStartSLOduration=4.448782242 podStartE2EDuration="6.87188283s" podCreationTimestamp="2025-11-21 15:22:58 +0000 UTC" firstStartedPulling="2025-11-21 15:23:00.39723762 +0000 UTC m=+4771.049436879" lastFinishedPulling="2025-11-21 15:23:02.820338208 +0000 UTC m=+4773.472537467" observedRunningTime="2025-11-21 15:23:04.868248886 +0000 UTC m=+4775.520448185" watchObservedRunningTime="2025-11-21 15:23:04.87188283 +0000 UTC m=+4775.524082109"
Nov 21 15:23:05 crc kubenswrapper[4774]: I1121 15:23:05.136992 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Nov 21 15:23:05 crc kubenswrapper[4774]: I1121 15:23:05.602035 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:09 crc kubenswrapper[4774]: I1121 15:23:09.335613 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d9c6c"
Nov 21 15:23:09 crc kubenswrapper[4774]: I1121 15:23:09.336480 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d9c6c"
Nov 21 15:23:09 crc kubenswrapper[4774]: I1121 15:23:09.384427 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d9c6c"
Nov 21 15:23:09 crc kubenswrapper[4774]: I1121 15:23:09.954210 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d9c6c"
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.026193 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-665ff86d95-rmd5z"]
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.027926 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.046918 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d9c6c"]
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.051356 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-665ff86d95-rmd5z"]
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.124612 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/844ca634-53e5-4246-880b-96811b359e81-config\") pod \"dnsmasq-dns-665ff86d95-rmd5z\" (UID: \"844ca634-53e5-4246-880b-96811b359e81\") " pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.124674 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/844ca634-53e5-4246-880b-96811b359e81-dns-svc\") pod \"dnsmasq-dns-665ff86d95-rmd5z\" (UID: \"844ca634-53e5-4246-880b-96811b359e81\") " pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.124715 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5kxp\" (UniqueName: \"kubernetes.io/projected/844ca634-53e5-4246-880b-96811b359e81-kube-api-access-x5kxp\") pod \"dnsmasq-dns-665ff86d95-rmd5z\" (UID: \"844ca634-53e5-4246-880b-96811b359e81\") " pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.226195 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/844ca634-53e5-4246-880b-96811b359e81-config\") pod \"dnsmasq-dns-665ff86d95-rmd5z\" (UID: \"844ca634-53e5-4246-880b-96811b359e81\") " pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.226266 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/844ca634-53e5-4246-880b-96811b359e81-dns-svc\") pod \"dnsmasq-dns-665ff86d95-rmd5z\" (UID: \"844ca634-53e5-4246-880b-96811b359e81\") " pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.226309 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5kxp\" (UniqueName: \"kubernetes.io/projected/844ca634-53e5-4246-880b-96811b359e81-kube-api-access-x5kxp\") pod \"dnsmasq-dns-665ff86d95-rmd5z\" (UID: \"844ca634-53e5-4246-880b-96811b359e81\") " pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.227525 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/844ca634-53e5-4246-880b-96811b359e81-config\") pod \"dnsmasq-dns-665ff86d95-rmd5z\" (UID: \"844ca634-53e5-4246-880b-96811b359e81\") " pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.227844 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/844ca634-53e5-4246-880b-96811b359e81-dns-svc\") pod \"dnsmasq-dns-665ff86d95-rmd5z\" (UID: \"844ca634-53e5-4246-880b-96811b359e81\") " pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.243689 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5kxp\" (UniqueName: \"kubernetes.io/projected/844ca634-53e5-4246-880b-96811b359e81-kube-api-access-x5kxp\") pod \"dnsmasq-dns-665ff86d95-rmd5z\" (UID: \"844ca634-53e5-4246-880b-96811b359e81\") " pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.349513 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.722304 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.827396 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-665ff86d95-rmd5z"]
Nov 21 15:23:10 crc kubenswrapper[4774]: I1121 15:23:10.903104 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z" event={"ID":"844ca634-53e5-4246-880b-96811b359e81","Type":"ContainerStarted","Data":"cf8803009b5dcc157944ac75fa64469b1acc899e82ed3e9872a7d7878a81525f"}
Nov 21 15:23:11 crc kubenswrapper[4774]: I1121 15:23:11.201495 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 21 15:23:11 crc kubenswrapper[4774]: I1121 15:23:11.912158 4774 generic.go:334] "Generic (PLEG): container finished" podID="844ca634-53e5-4246-880b-96811b359e81" containerID="48603fc9545fdd21cef7450ae6201d5a479a08e3bd823161a73872254dc5c00a" exitCode=0
Nov 21 15:23:11 crc kubenswrapper[4774]: I1121 15:23:11.912266 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z" event={"ID":"844ca634-53e5-4246-880b-96811b359e81","Type":"ContainerDied","Data":"48603fc9545fdd21cef7450ae6201d5a479a08e3bd823161a73872254dc5c00a"}
Nov 21 15:23:11 crc kubenswrapper[4774]: I1121 15:23:11.912448 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d9c6c" podUID="61ef2093-e396-46a6-94a8-69e7d40de50b" containerName="registry-server" containerID="cri-o://a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a" gracePeriod=2
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.315960 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d9c6c"
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.456367 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61ef2093-e396-46a6-94a8-69e7d40de50b-utilities\") pod \"61ef2093-e396-46a6-94a8-69e7d40de50b\" (UID: \"61ef2093-e396-46a6-94a8-69e7d40de50b\") "
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.456468 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dckcz\" (UniqueName: \"kubernetes.io/projected/61ef2093-e396-46a6-94a8-69e7d40de50b-kube-api-access-dckcz\") pod \"61ef2093-e396-46a6-94a8-69e7d40de50b\" (UID: \"61ef2093-e396-46a6-94a8-69e7d40de50b\") "
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.456540 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61ef2093-e396-46a6-94a8-69e7d40de50b-catalog-content\") pod \"61ef2093-e396-46a6-94a8-69e7d40de50b\" (UID: \"61ef2093-e396-46a6-94a8-69e7d40de50b\") "
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.457381 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61ef2093-e396-46a6-94a8-69e7d40de50b-utilities" (OuterVolumeSpecName: "utilities") pod "61ef2093-e396-46a6-94a8-69e7d40de50b" (UID: "61ef2093-e396-46a6-94a8-69e7d40de50b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.463410 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61ef2093-e396-46a6-94a8-69e7d40de50b-kube-api-access-dckcz" (OuterVolumeSpecName: "kube-api-access-dckcz") pod "61ef2093-e396-46a6-94a8-69e7d40de50b" (UID: "61ef2093-e396-46a6-94a8-69e7d40de50b"). InnerVolumeSpecName "kube-api-access-dckcz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.557919 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/61ef2093-e396-46a6-94a8-69e7d40de50b-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.557957 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dckcz\" (UniqueName: \"kubernetes.io/projected/61ef2093-e396-46a6-94a8-69e7d40de50b-kube-api-access-dckcz\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.569912 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="8532c113-de51-49d6-9cd2-9e161b8e844f" containerName="rabbitmq" containerID="cri-o://0f92aa3ac48436cbb3cae14a2f5c8d4abe2887be6fc8596a0bc8f24ed26a55f0" gracePeriod=604799
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.766691 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61ef2093-e396-46a6-94a8-69e7d40de50b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "61ef2093-e396-46a6-94a8-69e7d40de50b" (UID: "61ef2093-e396-46a6-94a8-69e7d40de50b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.862459 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/61ef2093-e396-46a6-94a8-69e7d40de50b-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.920099 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z" event={"ID":"844ca634-53e5-4246-880b-96811b359e81","Type":"ContainerStarted","Data":"c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a"}
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.922085 4774 generic.go:334] "Generic (PLEG): container finished" podID="61ef2093-e396-46a6-94a8-69e7d40de50b" containerID="a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a" exitCode=0
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.922129 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d9c6c" event={"ID":"61ef2093-e396-46a6-94a8-69e7d40de50b","Type":"ContainerDied","Data":"a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a"}
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.922158 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d9c6c" event={"ID":"61ef2093-e396-46a6-94a8-69e7d40de50b","Type":"ContainerDied","Data":"5ca9dbf5d944c95b103f4300082d37b759a70440d06e1480cc007e6ecd5a05b4"}
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.922165 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d9c6c"
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.922174 4774 scope.go:117] "RemoveContainer" containerID="a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a"
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.942797 4774 scope.go:117] "RemoveContainer" containerID="87568f568f218b82291f766afcc900e5edd68b3c1cd972dbe9a814860ef11a9b"
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.970511 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z" podStartSLOduration=2.97048957 podStartE2EDuration="2.97048957s" podCreationTimestamp="2025-11-21 15:23:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:23:12.952655521 +0000 UTC m=+4783.604854800" watchObservedRunningTime="2025-11-21 15:23:12.97048957 +0000 UTC m=+4783.622688839"
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.972550 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d9c6c"]
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.992660 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d9c6c"]
Nov 21 15:23:12 crc kubenswrapper[4774]: I1121 15:23:12.993489 4774 scope.go:117] "RemoveContainer" containerID="b86fb325aa63249bd56d8574f489b4d38e72c7e52fce01f69e9d0e0b3426fa53"
Nov 21 15:23:13 crc kubenswrapper[4774]: I1121 15:23:13.010840 4774 scope.go:117] "RemoveContainer" containerID="a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a"
Nov 21 15:23:13 crc kubenswrapper[4774]: E1121 15:23:13.011279 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a\": container with ID starting with a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a not found: ID does not exist" containerID="a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a"
Nov 21 15:23:13 crc kubenswrapper[4774]: I1121 15:23:13.011321 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a"} err="failed to get container status \"a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a\": rpc error: code = NotFound desc = could not find container \"a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a\": container with ID starting with a8467dd2956ca5ce3b17442438db93adad97079f0e34ff8883b20a594159b97a not found: ID does not exist"
Nov 21 15:23:13 crc kubenswrapper[4774]: I1121 15:23:13.011369 4774 scope.go:117] "RemoveContainer" containerID="87568f568f218b82291f766afcc900e5edd68b3c1cd972dbe9a814860ef11a9b"
Nov 21 15:23:13 crc kubenswrapper[4774]: E1121 15:23:13.011752 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87568f568f218b82291f766afcc900e5edd68b3c1cd972dbe9a814860ef11a9b\": container with ID starting with 87568f568f218b82291f766afcc900e5edd68b3c1cd972dbe9a814860ef11a9b not found: ID does not exist" containerID="87568f568f218b82291f766afcc900e5edd68b3c1cd972dbe9a814860ef11a9b"
Nov 21 15:23:13 crc kubenswrapper[4774]: I1121 15:23:13.011904 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87568f568f218b82291f766afcc900e5edd68b3c1cd972dbe9a814860ef11a9b"} err="failed to get container status \"87568f568f218b82291f766afcc900e5edd68b3c1cd972dbe9a814860ef11a9b\": rpc error: code = NotFound desc = could not find container \"87568f568f218b82291f766afcc900e5edd68b3c1cd972dbe9a814860ef11a9b\": container with ID starting with 87568f568f218b82291f766afcc900e5edd68b3c1cd972dbe9a814860ef11a9b not found: ID does not exist"
Nov 21 15:23:13 crc kubenswrapper[4774]: I1121 15:23:13.011929 4774 scope.go:117] "RemoveContainer" containerID="b86fb325aa63249bd56d8574f489b4d38e72c7e52fce01f69e9d0e0b3426fa53"
Nov 21 15:23:13 crc kubenswrapper[4774]: E1121 15:23:13.013720 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b86fb325aa63249bd56d8574f489b4d38e72c7e52fce01f69e9d0e0b3426fa53\": container with ID starting with b86fb325aa63249bd56d8574f489b4d38e72c7e52fce01f69e9d0e0b3426fa53 not found: ID does not exist" containerID="b86fb325aa63249bd56d8574f489b4d38e72c7e52fce01f69e9d0e0b3426fa53"
Nov 21 15:23:13 crc kubenswrapper[4774]: I1121 15:23:13.013774 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b86fb325aa63249bd56d8574f489b4d38e72c7e52fce01f69e9d0e0b3426fa53"} err="failed to get container status \"b86fb325aa63249bd56d8574f489b4d38e72c7e52fce01f69e9d0e0b3426fa53\": rpc error: code = NotFound desc = could not find container \"b86fb325aa63249bd56d8574f489b4d38e72c7e52fce01f69e9d0e0b3426fa53\": container with ID starting with b86fb325aa63249bd56d8574f489b4d38e72c7e52fce01f69e9d0e0b3426fa53 not found: ID does not exist"
Nov 21 15:23:13 crc kubenswrapper[4774]: I1121 15:23:13.027152 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" containerName="rabbitmq" containerID="cri-o://896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3" gracePeriod=604799
Nov 21 15:23:13 crc kubenswrapper[4774]: I1121 15:23:13.931553 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:23:14 crc kubenswrapper[4774]: I1121 15:23:14.101451 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61ef2093-e396-46a6-94a8-69e7d40de50b" path="/var/lib/kubelet/pods/61ef2093-e396-46a6-94a8-69e7d40de50b/volumes"
Nov 21 15:23:15 crc kubenswrapper[4774]: I1121 15:23:15.135594 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="8532c113-de51-49d6-9cd2-9e161b8e844f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.239:5672: connect: connection refused"
Nov 21 15:23:15 crc kubenswrapper[4774]: I1121 15:23:15.600718 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.240:5672: connect: connection refused"
Nov 21 15:23:18 crc kubenswrapper[4774]: I1121 15:23:18.974194 4774 generic.go:334] "Generic (PLEG): container finished" podID="8532c113-de51-49d6-9cd2-9e161b8e844f" containerID="0f92aa3ac48436cbb3cae14a2f5c8d4abe2887be6fc8596a0bc8f24ed26a55f0" exitCode=0
Nov 21 15:23:18 crc kubenswrapper[4774]: I1121 15:23:18.974463 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8532c113-de51-49d6-9cd2-9e161b8e844f","Type":"ContainerDied","Data":"0f92aa3ac48436cbb3cae14a2f5c8d4abe2887be6fc8596a0bc8f24ed26a55f0"}
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.292849 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.364619 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-confd\") pod \"8532c113-de51-49d6-9cd2-9e161b8e844f\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.364966 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-plugins\") pod \"8532c113-de51-49d6-9cd2-9e161b8e844f\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.365011 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8532c113-de51-49d6-9cd2-9e161b8e844f-erlang-cookie-secret\") pod \"8532c113-de51-49d6-9cd2-9e161b8e844f\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.365105 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8532c113-de51-49d6-9cd2-9e161b8e844f-server-conf\") pod \"8532c113-de51-49d6-9cd2-9e161b8e844f\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.365155 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8532c113-de51-49d6-9cd2-9e161b8e844f-plugins-conf\") pod \"8532c113-de51-49d6-9cd2-9e161b8e844f\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.365321 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df\") pod \"8532c113-de51-49d6-9cd2-9e161b8e844f\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.365376 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8532c113-de51-49d6-9cd2-9e161b8e844f-pod-info\") pod \"8532c113-de51-49d6-9cd2-9e161b8e844f\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.365418 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wtd6\" (UniqueName: \"kubernetes.io/projected/8532c113-de51-49d6-9cd2-9e161b8e844f-kube-api-access-6wtd6\") pod \"8532c113-de51-49d6-9cd2-9e161b8e844f\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.365478 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-erlang-cookie\") pod \"8532c113-de51-49d6-9cd2-9e161b8e844f\" (UID: \"8532c113-de51-49d6-9cd2-9e161b8e844f\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.367410 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "8532c113-de51-49d6-9cd2-9e161b8e844f" (UID: "8532c113-de51-49d6-9cd2-9e161b8e844f"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.370537 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8532c113-de51-49d6-9cd2-9e161b8e844f-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "8532c113-de51-49d6-9cd2-9e161b8e844f" (UID: "8532c113-de51-49d6-9cd2-9e161b8e844f"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.373896 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "8532c113-de51-49d6-9cd2-9e161b8e844f" (UID: "8532c113-de51-49d6-9cd2-9e161b8e844f"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.395138 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8532c113-de51-49d6-9cd2-9e161b8e844f-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "8532c113-de51-49d6-9cd2-9e161b8e844f" (UID: "8532c113-de51-49d6-9cd2-9e161b8e844f"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.395273 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8532c113-de51-49d6-9cd2-9e161b8e844f-kube-api-access-6wtd6" (OuterVolumeSpecName: "kube-api-access-6wtd6") pod "8532c113-de51-49d6-9cd2-9e161b8e844f" (UID: "8532c113-de51-49d6-9cd2-9e161b8e844f"). InnerVolumeSpecName "kube-api-access-6wtd6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.395369 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df" (OuterVolumeSpecName: "persistence") pod "8532c113-de51-49d6-9cd2-9e161b8e844f" (UID: "8532c113-de51-49d6-9cd2-9e161b8e844f"). InnerVolumeSpecName "pvc-55fa9486-c50a-4349-8465-17b561ece9df". PluginName "kubernetes.io/csi", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.395803 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8532c113-de51-49d6-9cd2-9e161b8e844f-server-conf" (OuterVolumeSpecName: "server-conf") pod "8532c113-de51-49d6-9cd2-9e161b8e844f" (UID: "8532c113-de51-49d6-9cd2-9e161b8e844f"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.401580 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/8532c113-de51-49d6-9cd2-9e161b8e844f-pod-info" (OuterVolumeSpecName: "pod-info") pod "8532c113-de51-49d6-9cd2-9e161b8e844f" (UID: "8532c113-de51-49d6-9cd2-9e161b8e844f"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.466951 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wtd6\" (UniqueName: \"kubernetes.io/projected/8532c113-de51-49d6-9cd2-9e161b8e844f-kube-api-access-6wtd6\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.466994 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.467009 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.467022 4774 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8532c113-de51-49d6-9cd2-9e161b8e844f-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.467036 4774 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8532c113-de51-49d6-9cd2-9e161b8e844f-server-conf\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.467048 4774 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8532c113-de51-49d6-9cd2-9e161b8e844f-plugins-conf\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.467090 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-55fa9486-c50a-4349-8465-17b561ece9df\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df\") on node \"crc\" "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.467109 4774 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8532c113-de51-49d6-9cd2-9e161b8e844f-pod-info\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.491721 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "8532c113-de51-49d6-9cd2-9e161b8e844f" (UID: "8532c113-de51-49d6-9cd2-9e161b8e844f"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.500748 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.505560 4774 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.505756 4774 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-55fa9486-c50a-4349-8465-17b561ece9df" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df") on node "crc"
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.568869 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\") pod \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.568946 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-pod-info\") pod \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.568972 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-plugins-conf\") pod \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.569012 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-server-conf\") pod \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.569098 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-confd\") pod \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.569126 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-247zl\" (UniqueName: \"kubernetes.io/projected/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-kube-api-access-247zl\") pod \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.569170 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-plugins\") pod \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.569215 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-erlang-cookie\") pod \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.569271 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-erlang-cookie-secret\") pod \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\" (UID: \"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d\") "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.569686 4774 reconciler_common.go:293] "Volume detached for volume \"pvc-55fa9486-c50a-4349-8465-17b561ece9df\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.569730 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8532c113-de51-49d6-9cd2-9e161b8e844f-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.571790 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" (UID: "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.574396 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" (UID: "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.574650 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" (UID: "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.574752 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" (UID: "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.574975 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-pod-info" (OuterVolumeSpecName: "pod-info") pod "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" (UID: "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.576598 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-kube-api-access-247zl" (OuterVolumeSpecName: "kube-api-access-247zl") pod "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" (UID: "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d"). InnerVolumeSpecName "kube-api-access-247zl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.585481 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63" (OuterVolumeSpecName: "persistence") pod "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" (UID: "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d"). InnerVolumeSpecName "pvc-a44fa96b-8ebd-46d4-9690-48b688794a63". PluginName "kubernetes.io/csi", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.591469 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-server-conf" (OuterVolumeSpecName: "server-conf") pod "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" (UID: "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.663124 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" (UID: "e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.670960 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.670993 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-247zl\" (UniqueName: \"kubernetes.io/projected/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-kube-api-access-247zl\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.671005 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.671014 4774 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.671023 4774 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.671058 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\") on node \"crc\" "
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.671072 4774 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-pod-info\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.671080 4774 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-plugins-conf\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.671088 4774 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d-server-conf\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.688111 4774 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.688342 4774 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-a44fa96b-8ebd-46d4-9690-48b688794a63" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63") on node "crc"
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.772599 4774 reconciler_common.go:293] "Volume detached for volume \"pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\") on node \"crc\" DevicePath \"\""
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.989260 4774 generic.go:334] "Generic (PLEG): container finished" podID="e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" containerID="896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3" exitCode=0
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.989343 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d","Type":"ContainerDied","Data":"896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3"}
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.989375 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.989618 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d","Type":"ContainerDied","Data":"aa9c0c6cab04657956f143a584a94088b77d9c163fdeb441b70452b76e778fb4"}
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.989677 4774 scope.go:117] "RemoveContainer" containerID="896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3"
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.993272 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"8532c113-de51-49d6-9cd2-9e161b8e844f","Type":"ContainerDied","Data":"aeb4ab6f04d1fe363a6f4c8a3bc505cd700e0e32c37ce917217f5f3aba30ba61"}
Nov 21 15:23:19 crc kubenswrapper[4774]: I1121 15:23:19.993410 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.016579 4774 scope.go:117] "RemoveContainer" containerID="b59c816f121b463103778cf735ebb4b940efcb0056e97633491fc46ba4fe54a5"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.036248 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.044844 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.059172 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.070598 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.070927 4774 scope.go:117] "RemoveContainer" containerID="896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3"
Nov 21 15:23:20 crc kubenswrapper[4774]: E1121 15:23:20.071370 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3\": container with ID starting with 896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3 not found: ID does not exist" containerID="896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.071402 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3"} err="failed to get container status \"896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3\": rpc error: code = NotFound desc = could not find container \"896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3\": container with ID starting with 896a4858fb9c3a6111c16318a7a5df8b0fff375ea0396821ad4c5c800dff14c3 not found: ID does not exist"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.071424 4774 scope.go:117] "RemoveContainer" containerID="b59c816f121b463103778cf735ebb4b940efcb0056e97633491fc46ba4fe54a5"
Nov 21 15:23:20 crc kubenswrapper[4774]: E1121 15:23:20.071689 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b59c816f121b463103778cf735ebb4b940efcb0056e97633491fc46ba4fe54a5\": container with ID starting with b59c816f121b463103778cf735ebb4b940efcb0056e97633491fc46ba4fe54a5 not found: ID does not exist" containerID="b59c816f121b463103778cf735ebb4b940efcb0056e97633491fc46ba4fe54a5"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.071729 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b59c816f121b463103778cf735ebb4b940efcb0056e97633491fc46ba4fe54a5"} err="failed to get container status \"b59c816f121b463103778cf735ebb4b940efcb0056e97633491fc46ba4fe54a5\": rpc error: code = NotFound desc = could not find container \"b59c816f121b463103778cf735ebb4b940efcb0056e97633491fc46ba4fe54a5\": container with ID starting with b59c816f121b463103778cf735ebb4b940efcb0056e97633491fc46ba4fe54a5 not found: ID does not exist"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.071762 4774 scope.go:117] "RemoveContainer" containerID="0f92aa3ac48436cbb3cae14a2f5c8d4abe2887be6fc8596a0bc8f24ed26a55f0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.078487 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 21 15:23:20 crc kubenswrapper[4774]: E1121 15:23:20.078838 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61ef2093-e396-46a6-94a8-69e7d40de50b" containerName="extract-content"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.078850 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="61ef2093-e396-46a6-94a8-69e7d40de50b" containerName="extract-content"
Nov 21 15:23:20 crc kubenswrapper[4774]: E1121 15:23:20.078860 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" containerName="rabbitmq"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.078866 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" containerName="rabbitmq"
Nov 21 15:23:20 crc kubenswrapper[4774]: E1121 15:23:20.078878 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8532c113-de51-49d6-9cd2-9e161b8e844f" containerName="setup-container"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.078884 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8532c113-de51-49d6-9cd2-9e161b8e844f" containerName="setup-container"
Nov 21 15:23:20 crc kubenswrapper[4774]: E1121 15:23:20.078898 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61ef2093-e396-46a6-94a8-69e7d40de50b" containerName="registry-server"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.078903 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="61ef2093-e396-46a6-94a8-69e7d40de50b" containerName="registry-server"
Nov 21 15:23:20 crc kubenswrapper[4774]: E1121 15:23:20.078921 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8532c113-de51-49d6-9cd2-9e161b8e844f" containerName="rabbitmq"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.078927 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8532c113-de51-49d6-9cd2-9e161b8e844f" containerName="rabbitmq"
Nov 21 15:23:20 crc kubenswrapper[4774]: E1121 15:23:20.078935 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" containerName="setup-container"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.078941 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" containerName="setup-container"
Nov 21 15:23:20 crc kubenswrapper[4774]: E1121 15:23:20.078955 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61ef2093-e396-46a6-94a8-69e7d40de50b" containerName="extract-utilities"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.078961 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="61ef2093-e396-46a6-94a8-69e7d40de50b" containerName="extract-utilities"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.079101 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" containerName="rabbitmq"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.079114 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8532c113-de51-49d6-9cd2-9e161b8e844f" containerName="rabbitmq"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.079131 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="61ef2093-e396-46a6-94a8-69e7d40de50b" containerName="registry-server"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.080201 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.084303 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.084634 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.084920 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.085120 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-44k7v"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.085345 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.112244 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8532c113-de51-49d6-9cd2-9e161b8e844f" path="/var/lib/kubelet/pods/8532c113-de51-49d6-9cd2-9e161b8e844f/volumes"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.112998 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d" path="/var/lib/kubelet/pods/e45463a2-18f5-41f0-a1cb-c5ae16ad7b3d/volumes"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.113674 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.115017 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.115314 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.118057 4774 scope.go:117] "RemoveContainer" containerID="b247e4017a246f9b9f8698ac0059b194728d910484bd0f40acc276ae51f928b1"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.118254 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.123693 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.118309 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.118337 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.118365 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-869bx"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.118369 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.178769 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/552d8f65-7177-4f9b-a454-a31a1528b17f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.178870 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4rfh\" (UniqueName: \"kubernetes.io/projected/2ee0feed-380f-455b-be85-6eae06c085e7-kube-api-access-v4rfh\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.178931 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-55fa9486-c50a-4349-8465-17b561ece9df\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.178955 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2ee0feed-380f-455b-be85-6eae06c085e7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179204 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2ee0feed-380f-455b-be85-6eae06c085e7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179230 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/552d8f65-7177-4f9b-a454-a31a1528b17f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179278 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2ee0feed-380f-455b-be85-6eae06c085e7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179304 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/552d8f65-7177-4f9b-a454-a31a1528b17f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179347 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/552d8f65-7177-4f9b-a454-a31a1528b17f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179378 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/552d8f65-7177-4f9b-a454-a31a1528b17f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179433 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/552d8f65-7177-4f9b-a454-a31a1528b17f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179455 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2ee0feed-380f-455b-be85-6eae06c085e7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179481 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2ee0feed-380f-455b-be85-6eae06c085e7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179501 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179527 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wnd8\" (UniqueName: \"kubernetes.io/projected/552d8f65-7177-4f9b-a454-a31a1528b17f-kube-api-access-2wnd8\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179549 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/552d8f65-7177-4f9b-a454-a31a1528b17f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179567 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2ee0feed-380f-455b-be85-6eae06c085e7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.179583 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2ee0feed-380f-455b-be85-6eae06c085e7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.280876 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/552d8f65-7177-4f9b-a454-a31a1528b17f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.280951 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4rfh\" (UniqueName: \"kubernetes.io/projected/2ee0feed-380f-455b-be85-6eae06c085e7-kube-api-access-v4rfh\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.280990 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-55fa9486-c50a-4349-8465-17b561ece9df\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281018 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2ee0feed-380f-455b-be85-6eae06c085e7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281041 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2ee0feed-380f-455b-be85-6eae06c085e7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281070 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/552d8f65-7177-4f9b-a454-a31a1528b17f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281091 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2ee0feed-380f-455b-be85-6eae06c085e7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281118 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/552d8f65-7177-4f9b-a454-a31a1528b17f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281143 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/552d8f65-7177-4f9b-a454-a31a1528b17f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281182 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/552d8f65-7177-4f9b-a454-a31a1528b17f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281208 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/552d8f65-7177-4f9b-a454-a31a1528b17f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281235 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2ee0feed-380f-455b-be85-6eae06c085e7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281265 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2ee0feed-380f-455b-be85-6eae06c085e7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281318 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281342 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wnd8\" (UniqueName: \"kubernetes.io/projected/552d8f65-7177-4f9b-a454-a31a1528b17f-kube-api-access-2wnd8\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281362 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/552d8f65-7177-4f9b-a454-a31a1528b17f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281377 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2ee0feed-380f-455b-be85-6eae06c085e7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281394 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2ee0feed-380f-455b-be85-6eae06c085e7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.281745 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/552d8f65-7177-4f9b-a454-a31a1528b17f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.282027 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/552d8f65-7177-4f9b-a454-a31a1528b17f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.282421 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/552d8f65-7177-4f9b-a454-a31a1528b17f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.282620 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2ee0feed-380f-455b-be85-6eae06c085e7-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.282655 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/552d8f65-7177-4f9b-a454-a31a1528b17f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.283622 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2ee0feed-380f-455b-be85-6eae06c085e7-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.285574 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2ee0feed-380f-455b-be85-6eae06c085e7-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.286903 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2ee0feed-380f-455b-be85-6eae06c085e7-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.286922 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2ee0feed-380f-455b-be85-6eae06c085e7-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.287100 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2ee0feed-380f-455b-be85-6eae06c085e7-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0"
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.287272 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.287299 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.287301 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-55fa9486-c50a-4349-8465-17b561ece9df\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e6b4630e21b2bb3e6513d4f380d3716c3b1abd0736fd9075181f25a5637de358/globalmount\"" pod="openstack/rabbitmq-server-0" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.287324 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c37317031bf95367ef9f439d6aac52a5ab67351d92ce77de835f97768d8da840/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.287920 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2ee0feed-380f-455b-be85-6eae06c085e7-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.289816 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/552d8f65-7177-4f9b-a454-a31a1528b17f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.289986 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/552d8f65-7177-4f9b-a454-a31a1528b17f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.292323 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/552d8f65-7177-4f9b-a454-a31a1528b17f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.298882 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4rfh\" (UniqueName: \"kubernetes.io/projected/2ee0feed-380f-455b-be85-6eae06c085e7-kube-api-access-v4rfh\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.303441 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wnd8\" (UniqueName: \"kubernetes.io/projected/552d8f65-7177-4f9b-a454-a31a1528b17f-kube-api-access-2wnd8\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.314398 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-55fa9486-c50a-4349-8465-17b561ece9df\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-55fa9486-c50a-4349-8465-17b561ece9df\") pod \"rabbitmq-server-0\" (UID: \"552d8f65-7177-4f9b-a454-a31a1528b17f\") " pod="openstack/rabbitmq-server-0" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.316067 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a44fa96b-8ebd-46d4-9690-48b688794a63\") pod \"rabbitmq-cell1-server-0\" (UID: \"2ee0feed-380f-455b-be85-6eae06c085e7\") " pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.351943 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.416154 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.420411 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-777df6d877-mdvmt"] Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.420751 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-777df6d877-mdvmt" podUID="cd686dda-40cb-412e-96b2-2a40efe1b4f7" containerName="dnsmasq-dns" containerID="cri-o://3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446" gracePeriod=10 Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.434281 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.877767 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:23:20 crc kubenswrapper[4774]: I1121 15:23:20.999939 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czv8z\" (UniqueName: \"kubernetes.io/projected/cd686dda-40cb-412e-96b2-2a40efe1b4f7-kube-api-access-czv8z\") pod \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\" (UID: \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\") " Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.000052 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd686dda-40cb-412e-96b2-2a40efe1b4f7-config\") pod \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\" (UID: \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\") " Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.000109 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd686dda-40cb-412e-96b2-2a40efe1b4f7-dns-svc\") pod \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\" (UID: \"cd686dda-40cb-412e-96b2-2a40efe1b4f7\") " Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.003531 4774 generic.go:334] "Generic (PLEG): container finished" podID="cd686dda-40cb-412e-96b2-2a40efe1b4f7" containerID="3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446" exitCode=0 Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.003622 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-777df6d877-mdvmt" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.003631 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-777df6d877-mdvmt" event={"ID":"cd686dda-40cb-412e-96b2-2a40efe1b4f7","Type":"ContainerDied","Data":"3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446"} Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.003852 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-777df6d877-mdvmt" event={"ID":"cd686dda-40cb-412e-96b2-2a40efe1b4f7","Type":"ContainerDied","Data":"8a5bee76da33b7475eeaed1cf3235fb904ff738f51c5e558545b15b2d357f3a8"} Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.003905 4774 scope.go:117] "RemoveContainer" containerID="3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.006417 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd686dda-40cb-412e-96b2-2a40efe1b4f7-kube-api-access-czv8z" (OuterVolumeSpecName: "kube-api-access-czv8z") pod "cd686dda-40cb-412e-96b2-2a40efe1b4f7" (UID: "cd686dda-40cb-412e-96b2-2a40efe1b4f7"). InnerVolumeSpecName "kube-api-access-czv8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.039717 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd686dda-40cb-412e-96b2-2a40efe1b4f7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cd686dda-40cb-412e-96b2-2a40efe1b4f7" (UID: "cd686dda-40cb-412e-96b2-2a40efe1b4f7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.042162 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd686dda-40cb-412e-96b2-2a40efe1b4f7-config" (OuterVolumeSpecName: "config") pod "cd686dda-40cb-412e-96b2-2a40efe1b4f7" (UID: "cd686dda-40cb-412e-96b2-2a40efe1b4f7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.074810 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.095250 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Nov 21 15:23:21 crc kubenswrapper[4774]: W1121 15:23:21.099404 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod552d8f65_7177_4f9b_a454_a31a1528b17f.slice/crio-5f5fac9e8542f08ba3f61657a3a318a1891c1d888c4407652d94c5e601ea2e6f WatchSource:0}: Error finding container 5f5fac9e8542f08ba3f61657a3a318a1891c1d888c4407652d94c5e601ea2e6f: Status 404 returned error can't find the container with id 5f5fac9e8542f08ba3f61657a3a318a1891c1d888c4407652d94c5e601ea2e6f Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.101412 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czv8z\" (UniqueName: \"kubernetes.io/projected/cd686dda-40cb-412e-96b2-2a40efe1b4f7-kube-api-access-czv8z\") on node \"crc\" DevicePath \"\"" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.101438 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd686dda-40cb-412e-96b2-2a40efe1b4f7-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.101452 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cd686dda-40cb-412e-96b2-2a40efe1b4f7-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.136303 4774 scope.go:117] "RemoveContainer" containerID="c57fe689546befa43af6f4b9155c668ca02db9a8a87e66fb45e4323d4d419ad2" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.157581 4774 scope.go:117] "RemoveContainer" containerID="3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446" Nov 21 15:23:21 crc kubenswrapper[4774]: E1121 15:23:21.158136 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446\": container with ID starting with 3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446 not found: ID does not exist" containerID="3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.158192 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446"} err="failed to get container status \"3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446\": rpc error: code = NotFound desc = could not find container \"3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446\": container with ID starting with 3649572c1af5ed1ba0dcb480d64a7cf59909f9f3d55cc66837926c54aec06446 not found: ID does not exist" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.158226 4774 scope.go:117] "RemoveContainer" containerID="c57fe689546befa43af6f4b9155c668ca02db9a8a87e66fb45e4323d4d419ad2" Nov 21 15:23:21 crc kubenswrapper[4774]: E1121 15:23:21.159326 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c57fe689546befa43af6f4b9155c668ca02db9a8a87e66fb45e4323d4d419ad2\": 
container with ID starting with c57fe689546befa43af6f4b9155c668ca02db9a8a87e66fb45e4323d4d419ad2 not found: ID does not exist" containerID="c57fe689546befa43af6f4b9155c668ca02db9a8a87e66fb45e4323d4d419ad2" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.159377 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c57fe689546befa43af6f4b9155c668ca02db9a8a87e66fb45e4323d4d419ad2"} err="failed to get container status \"c57fe689546befa43af6f4b9155c668ca02db9a8a87e66fb45e4323d4d419ad2\": rpc error: code = NotFound desc = could not find container \"c57fe689546befa43af6f4b9155c668ca02db9a8a87e66fb45e4323d4d419ad2\": container with ID starting with c57fe689546befa43af6f4b9155c668ca02db9a8a87e66fb45e4323d4d419ad2 not found: ID does not exist" Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.335059 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-777df6d877-mdvmt"] Nov 21 15:23:21 crc kubenswrapper[4774]: I1121 15:23:21.339247 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-777df6d877-mdvmt"] Nov 21 15:23:22 crc kubenswrapper[4774]: I1121 15:23:22.015252 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2ee0feed-380f-455b-be85-6eae06c085e7","Type":"ContainerStarted","Data":"d37dba3e16beb33e8d59222f7745329fef1e897ca032786a75e868c313553211"} Nov 21 15:23:22 crc kubenswrapper[4774]: I1121 15:23:22.018983 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"552d8f65-7177-4f9b-a454-a31a1528b17f","Type":"ContainerStarted","Data":"5f5fac9e8542f08ba3f61657a3a318a1891c1d888c4407652d94c5e601ea2e6f"} Nov 21 15:23:22 crc kubenswrapper[4774]: I1121 15:23:22.105041 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd686dda-40cb-412e-96b2-2a40efe1b4f7" path="/var/lib/kubelet/pods/cd686dda-40cb-412e-96b2-2a40efe1b4f7/volumes" Nov 21 15:23:23 crc kubenswrapper[4774]: I1121 15:23:23.030237 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"552d8f65-7177-4f9b-a454-a31a1528b17f","Type":"ContainerStarted","Data":"d9ba3a1189757031b8a97ccc0bfbf60c0203e3b6305f2e35f62b6d622de68a18"} Nov 21 15:23:23 crc kubenswrapper[4774]: I1121 15:23:23.035093 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2ee0feed-380f-455b-be85-6eae06c085e7","Type":"ContainerStarted","Data":"9a34c35613de1d3f6194f1d0fda5f925abc8359254f2a8a56cef292a9d61bff4"} Nov 21 15:23:55 crc kubenswrapper[4774]: I1121 15:23:55.287485 4774 generic.go:334] "Generic (PLEG): container finished" podID="552d8f65-7177-4f9b-a454-a31a1528b17f" containerID="d9ba3a1189757031b8a97ccc0bfbf60c0203e3b6305f2e35f62b6d622de68a18" exitCode=0 Nov 21 15:23:55 crc kubenswrapper[4774]: I1121 15:23:55.287622 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"552d8f65-7177-4f9b-a454-a31a1528b17f","Type":"ContainerDied","Data":"d9ba3a1189757031b8a97ccc0bfbf60c0203e3b6305f2e35f62b6d622de68a18"} Nov 21 15:23:55 crc kubenswrapper[4774]: I1121 15:23:55.290106 4774 generic.go:334] "Generic (PLEG): container finished" podID="2ee0feed-380f-455b-be85-6eae06c085e7" containerID="9a34c35613de1d3f6194f1d0fda5f925abc8359254f2a8a56cef292a9d61bff4" exitCode=0 Nov 21 15:23:55 crc kubenswrapper[4774]: I1121 15:23:55.290156 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2ee0feed-380f-455b-be85-6eae06c085e7","Type":"ContainerDied","Data":"9a34c35613de1d3f6194f1d0fda5f925abc8359254f2a8a56cef292a9d61bff4"} Nov 21 15:23:56 crc kubenswrapper[4774]: I1121 15:23:56.298759 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"552d8f65-7177-4f9b-a454-a31a1528b17f","Type":"ContainerStarted","Data":"d2b710adf74dbe8444da9f8d4a73c4f49f123d9c0892ac7b005bacb0d20e3785"} Nov 21 15:23:56 crc kubenswrapper[4774]: I1121 15:23:56.299248 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Nov 21 15:23:56 crc kubenswrapper[4774]: I1121 15:23:56.300793 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2ee0feed-380f-455b-be85-6eae06c085e7","Type":"ContainerStarted","Data":"0aea1c4fe17ae9348f53cd3659ea77dd9b6c28d1c1226c57d6444dc1f7f0f80e"} Nov 21 15:23:56 crc kubenswrapper[4774]: I1121 15:23:56.300983 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:23:56 crc kubenswrapper[4774]: I1121 15:23:56.343195 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.343177514 podStartE2EDuration="36.343177514s" podCreationTimestamp="2025-11-21 15:23:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:23:56.334467605 +0000 UTC m=+4826.986666864" watchObservedRunningTime="2025-11-21 15:23:56.343177514 +0000 UTC m=+4826.995376773" Nov 21 15:23:56 crc kubenswrapper[4774]: I1121 15:23:56.365450 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.365426079 podStartE2EDuration="36.365426079s" podCreationTimestamp="2025-11-21 15:23:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:23:56.359363536 +0000 UTC m=+4827.011562805" watchObservedRunningTime="2025-11-21 15:23:56.365426079 +0000 UTC m=+4827.017625338" Nov 21 15:24:10 crc kubenswrapper[4774]: I1121 15:24:10.419684 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Nov 21 15:24:10 crc kubenswrapper[4774]: I1121 15:24:10.437306 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Nov 21 15:24:21 crc kubenswrapper[4774]: I1121 15:24:21.796401 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1-default"] Nov 21 15:24:21 crc kubenswrapper[4774]: E1121 15:24:21.797463 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd686dda-40cb-412e-96b2-2a40efe1b4f7" containerName="dnsmasq-dns" Nov 21 15:24:21 crc kubenswrapper[4774]: I1121 15:24:21.797485 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd686dda-40cb-412e-96b2-2a40efe1b4f7" containerName="dnsmasq-dns" Nov 21 15:24:21 crc kubenswrapper[4774]: E1121 15:24:21.797533 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd686dda-40cb-412e-96b2-2a40efe1b4f7" containerName="init" Nov 21 15:24:21 crc kubenswrapper[4774]: I1121 15:24:21.797548 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd686dda-40cb-412e-96b2-2a40efe1b4f7" containerName="init" Nov 21 15:24:21 
crc kubenswrapper[4774]: I1121 15:24:21.797861 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd686dda-40cb-412e-96b2-2a40efe1b4f7" containerName="dnsmasq-dns" Nov 21 15:24:21 crc kubenswrapper[4774]: I1121 15:24:21.798662 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Nov 21 15:24:21 crc kubenswrapper[4774]: I1121 15:24:21.800666 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-hbfp7" Nov 21 15:24:21 crc kubenswrapper[4774]: I1121 15:24:21.805403 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Nov 21 15:24:21 crc kubenswrapper[4774]: I1121 15:24:21.932223 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x7db\" (UniqueName: \"kubernetes.io/projected/9b101815-e43b-4630-abdf-8b8253debff3-kube-api-access-8x7db\") pod \"mariadb-client-1-default\" (UID: \"9b101815-e43b-4630-abdf-8b8253debff3\") " pod="openstack/mariadb-client-1-default" Nov 21 15:24:22 crc kubenswrapper[4774]: I1121 15:24:22.034135 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x7db\" (UniqueName: \"kubernetes.io/projected/9b101815-e43b-4630-abdf-8b8253debff3-kube-api-access-8x7db\") pod \"mariadb-client-1-default\" (UID: \"9b101815-e43b-4630-abdf-8b8253debff3\") " pod="openstack/mariadb-client-1-default" Nov 21 15:24:22 crc kubenswrapper[4774]: I1121 15:24:22.070189 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x7db\" (UniqueName: \"kubernetes.io/projected/9b101815-e43b-4630-abdf-8b8253debff3-kube-api-access-8x7db\") pod \"mariadb-client-1-default\" (UID: \"9b101815-e43b-4630-abdf-8b8253debff3\") " pod="openstack/mariadb-client-1-default" Nov 21 15:24:22 crc kubenswrapper[4774]: I1121 15:24:22.151650 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Nov 21 15:24:22 crc kubenswrapper[4774]: I1121 15:24:22.675182 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1-default"] Nov 21 15:24:22 crc kubenswrapper[4774]: I1121 15:24:22.683357 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 15:24:23 crc kubenswrapper[4774]: I1121 15:24:23.494162 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"9b101815-e43b-4630-abdf-8b8253debff3","Type":"ContainerStarted","Data":"62f982f149d8b8089312305f53bdb94d58197ddd5e38cf9923a38c0e5c1d8282"} Nov 21 15:24:27 crc kubenswrapper[4774]: I1121 15:24:27.522450 4774 generic.go:334] "Generic (PLEG): container finished" podID="9b101815-e43b-4630-abdf-8b8253debff3" containerID="17400af807a23db985ade6fc7122c89b948c7c1858062cc13f5f050c0d2e1268" exitCode=0 Nov 21 15:24:27 crc kubenswrapper[4774]: I1121 15:24:27.522543 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1-default" event={"ID":"9b101815-e43b-4630-abdf-8b8253debff3","Type":"ContainerDied","Data":"17400af807a23db985ade6fc7122c89b948c7c1858062cc13f5f050c0d2e1268"} Nov 21 15:24:28 crc kubenswrapper[4774]: I1121 15:24:28.867381 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1-default" Nov 21 15:24:28 crc kubenswrapper[4774]: I1121 15:24:28.895377 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1-default_9b101815-e43b-4630-abdf-8b8253debff3/mariadb-client-1-default/0.log" Nov 21 15:24:28 crc kubenswrapper[4774]: I1121 15:24:28.919421 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1-default"] Nov 21 15:24:28 crc kubenswrapper[4774]: I1121 15:24:28.924274 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1-default"] Nov 21 15:24:28 crc kubenswrapper[4774]: I1121 15:24:28.939868 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8x7db\" (UniqueName: \"kubernetes.io/projected/9b101815-e43b-4630-abdf-8b8253debff3-kube-api-access-8x7db\") pod \"9b101815-e43b-4630-abdf-8b8253debff3\" (UID: \"9b101815-e43b-4630-abdf-8b8253debff3\") " Nov 21 15:24:28 crc kubenswrapper[4774]: I1121 15:24:28.948885 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b101815-e43b-4630-abdf-8b8253debff3-kube-api-access-8x7db" (OuterVolumeSpecName: "kube-api-access-8x7db") pod "9b101815-e43b-4630-abdf-8b8253debff3" (UID: "9b101815-e43b-4630-abdf-8b8253debff3"). InnerVolumeSpecName "kube-api-access-8x7db". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.041272 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8x7db\" (UniqueName: \"kubernetes.io/projected/9b101815-e43b-4630-abdf-8b8253debff3-kube-api-access-8x7db\") on node \"crc\" DevicePath \"\"" Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.325570 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2-default"] Nov 21 15:24:29 crc kubenswrapper[4774]: E1121 15:24:29.325992 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b101815-e43b-4630-abdf-8b8253debff3" containerName="mariadb-client-1-default" Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.326014 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b101815-e43b-4630-abdf-8b8253debff3" containerName="mariadb-client-1-default" Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.326182 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b101815-e43b-4630-abdf-8b8253debff3" containerName="mariadb-client-1-default" Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.327043 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.342221 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.447302 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjfj6\" (UniqueName: \"kubernetes.io/projected/da7ac7ef-fc06-4ab2-8565-10c440aa14dc-kube-api-access-jjfj6\") pod \"mariadb-client-2-default\" (UID: \"da7ac7ef-fc06-4ab2-8565-10c440aa14dc\") " pod="openstack/mariadb-client-2-default" Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.541340 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="62f982f149d8b8089312305f53bdb94d58197ddd5e38cf9923a38c0e5c1d8282" Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.541372 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1-default" Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.549222 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjfj6\" (UniqueName: \"kubernetes.io/projected/da7ac7ef-fc06-4ab2-8565-10c440aa14dc-kube-api-access-jjfj6\") pod \"mariadb-client-2-default\" (UID: \"da7ac7ef-fc06-4ab2-8565-10c440aa14dc\") " pod="openstack/mariadb-client-2-default" Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.567257 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjfj6\" (UniqueName: \"kubernetes.io/projected/da7ac7ef-fc06-4ab2-8565-10c440aa14dc-kube-api-access-jjfj6\") pod \"mariadb-client-2-default\" (UID: \"da7ac7ef-fc06-4ab2-8565-10c440aa14dc\") " pod="openstack/mariadb-client-2-default" Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.600590 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.600644 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:24:29 crc kubenswrapper[4774]: I1121 15:24:29.645374 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Nov 21 15:24:30 crc kubenswrapper[4774]: I1121 15:24:30.105172 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b101815-e43b-4630-abdf-8b8253debff3" path="/var/lib/kubelet/pods/9b101815-e43b-4630-abdf-8b8253debff3/volumes" Nov 21 15:24:30 crc kubenswrapper[4774]: I1121 15:24:30.139204 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2-default"] Nov 21 15:24:30 crc kubenswrapper[4774]: W1121 15:24:30.140862 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda7ac7ef_fc06_4ab2_8565_10c440aa14dc.slice/crio-841050e1ff05cd14d1bfd18c42058671ddaa81836fa3252612216bf092319fdb WatchSource:0}: Error finding container 841050e1ff05cd14d1bfd18c42058671ddaa81836fa3252612216bf092319fdb: Status 404 returned error can't find the container with id 841050e1ff05cd14d1bfd18c42058671ddaa81836fa3252612216bf092319fdb Nov 21 15:24:30 crc kubenswrapper[4774]: I1121 15:24:30.550264 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"da7ac7ef-fc06-4ab2-8565-10c440aa14dc","Type":"ContainerStarted","Data":"c7339e43433862310352470391f84aa65906c0e81082b0836ceea13b28fd95d5"} Nov 21 15:24:30 crc kubenswrapper[4774]: I1121 15:24:30.550776 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"da7ac7ef-fc06-4ab2-8565-10c440aa14dc","Type":"ContainerStarted","Data":"841050e1ff05cd14d1bfd18c42058671ddaa81836fa3252612216bf092319fdb"} Nov 21 15:24:30 crc kubenswrapper[4774]: I1121 15:24:30.575654 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-2-default" podStartSLOduration=1.5756338090000002 podStartE2EDuration="1.575633809s" podCreationTimestamp="2025-11-21 15:24:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:24:30.566053237 +0000 UTC m=+4861.218252526" watchObservedRunningTime="2025-11-21 15:24:30.575633809 +0000 UTC m=+4861.227833068" Nov 21 15:24:31 crc kubenswrapper[4774]: I1121 15:24:31.560244 4774 generic.go:334] "Generic (PLEG): container finished" podID="da7ac7ef-fc06-4ab2-8565-10c440aa14dc" containerID="c7339e43433862310352470391f84aa65906c0e81082b0836ceea13b28fd95d5" exitCode=1 Nov 21 15:24:31 crc kubenswrapper[4774]: I1121 15:24:31.560604 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2-default" event={"ID":"da7ac7ef-fc06-4ab2-8565-10c440aa14dc","Type":"ContainerDied","Data":"c7339e43433862310352470391f84aa65906c0e81082b0836ceea13b28fd95d5"} Nov 21 15:24:32 crc kubenswrapper[4774]: I1121 15:24:32.907472 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Nov 21 15:24:32 crc kubenswrapper[4774]: I1121 15:24:32.940648 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2-default"] Nov 21 15:24:32 crc kubenswrapper[4774]: I1121 15:24:32.945306 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2-default"] Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.012031 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjfj6\" (UniqueName: \"kubernetes.io/projected/da7ac7ef-fc06-4ab2-8565-10c440aa14dc-kube-api-access-jjfj6\") pod \"da7ac7ef-fc06-4ab2-8565-10c440aa14dc\" (UID: \"da7ac7ef-fc06-4ab2-8565-10c440aa14dc\") " Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.016588 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da7ac7ef-fc06-4ab2-8565-10c440aa14dc-kube-api-access-jjfj6" (OuterVolumeSpecName: "kube-api-access-jjfj6") pod "da7ac7ef-fc06-4ab2-8565-10c440aa14dc" (UID: "da7ac7ef-fc06-4ab2-8565-10c440aa14dc"). InnerVolumeSpecName "kube-api-access-jjfj6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.113287 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjfj6\" (UniqueName: \"kubernetes.io/projected/da7ac7ef-fc06-4ab2-8565-10c440aa14dc-kube-api-access-jjfj6\") on node \"crc\" DevicePath \"\"" Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.428810 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-1"] Nov 21 15:24:33 crc kubenswrapper[4774]: E1121 15:24:33.431511 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da7ac7ef-fc06-4ab2-8565-10c440aa14dc" containerName="mariadb-client-2-default" Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.431545 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="da7ac7ef-fc06-4ab2-8565-10c440aa14dc" containerName="mariadb-client-2-default" Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.431782 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="da7ac7ef-fc06-4ab2-8565-10c440aa14dc" containerName="mariadb-client-2-default" Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.432418 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.443909 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.521222 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhw8t\" (UniqueName: \"kubernetes.io/projected/80a0dac9-301c-4d86-9dcd-5400270b1f8b-kube-api-access-fhw8t\") pod \"mariadb-client-1\" (UID: \"80a0dac9-301c-4d86-9dcd-5400270b1f8b\") " pod="openstack/mariadb-client-1" Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.585473 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="841050e1ff05cd14d1bfd18c42058671ddaa81836fa3252612216bf092319fdb" Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.585516 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2-default" Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.622447 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhw8t\" (UniqueName: \"kubernetes.io/projected/80a0dac9-301c-4d86-9dcd-5400270b1f8b-kube-api-access-fhw8t\") pod \"mariadb-client-1\" (UID: \"80a0dac9-301c-4d86-9dcd-5400270b1f8b\") " pod="openstack/mariadb-client-1" Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.639698 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhw8t\" (UniqueName: \"kubernetes.io/projected/80a0dac9-301c-4d86-9dcd-5400270b1f8b-kube-api-access-fhw8t\") pod \"mariadb-client-1\" (UID: \"80a0dac9-301c-4d86-9dcd-5400270b1f8b\") " pod="openstack/mariadb-client-1" Nov 21 15:24:33 crc kubenswrapper[4774]: I1121 15:24:33.753448 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-1" Nov 21 15:24:34 crc kubenswrapper[4774]: I1121 15:24:34.102549 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da7ac7ef-fc06-4ab2-8565-10c440aa14dc" path="/var/lib/kubelet/pods/da7ac7ef-fc06-4ab2-8565-10c440aa14dc/volumes" Nov 21 15:24:34 crc kubenswrapper[4774]: I1121 15:24:34.334145 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-1"] Nov 21 15:24:34 crc kubenswrapper[4774]: W1121 15:24:34.338256 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80a0dac9_301c_4d86_9dcd_5400270b1f8b.slice/crio-f4880745c9ee8fe3ab151e29adfbda66d144d4b0ea60ea7f4a7afdf2572afadb WatchSource:0}: Error finding container f4880745c9ee8fe3ab151e29adfbda66d144d4b0ea60ea7f4a7afdf2572afadb: Status 404 returned error can't find the container with id f4880745c9ee8fe3ab151e29adfbda66d144d4b0ea60ea7f4a7afdf2572afadb Nov 21 15:24:34 crc kubenswrapper[4774]: I1121 15:24:34.593897 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"80a0dac9-301c-4d86-9dcd-5400270b1f8b","Type":"ContainerStarted","Data":"f4880745c9ee8fe3ab151e29adfbda66d144d4b0ea60ea7f4a7afdf2572afadb"} Nov 21 15:24:35 crc kubenswrapper[4774]: I1121 15:24:35.601984 4774 generic.go:334] "Generic (PLEG): container finished" podID="80a0dac9-301c-4d86-9dcd-5400270b1f8b" containerID="97ea2896fb60aea223ff83c1707a98f3faf2258cbf125e009220bf43a22f8a1c" exitCode=0 Nov 21 15:24:35 crc kubenswrapper[4774]: I1121 15:24:35.602080 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-1" event={"ID":"80a0dac9-301c-4d86-9dcd-5400270b1f8b","Type":"ContainerDied","Data":"97ea2896fb60aea223ff83c1707a98f3faf2258cbf125e009220bf43a22f8a1c"} Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:36.990223 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.009735 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-1_80a0dac9-301c-4d86-9dcd-5400270b1f8b/mariadb-client-1/0.log" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.081501 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-1"] Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.088282 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-1"] Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.104139 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhw8t\" (UniqueName: \"kubernetes.io/projected/80a0dac9-301c-4d86-9dcd-5400270b1f8b-kube-api-access-fhw8t\") pod \"80a0dac9-301c-4d86-9dcd-5400270b1f8b\" (UID: \"80a0dac9-301c-4d86-9dcd-5400270b1f8b\") " Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.118966 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80a0dac9-301c-4d86-9dcd-5400270b1f8b-kube-api-access-fhw8t" (OuterVolumeSpecName: "kube-api-access-fhw8t") pod "80a0dac9-301c-4d86-9dcd-5400270b1f8b" (UID: "80a0dac9-301c-4d86-9dcd-5400270b1f8b"). InnerVolumeSpecName "kube-api-access-fhw8t". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.207399 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhw8t\" (UniqueName: \"kubernetes.io/projected/80a0dac9-301c-4d86-9dcd-5400270b1f8b-kube-api-access-fhw8t\") on node \"crc\" DevicePath \"\"" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.545662 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-4-default"] Nov 21 15:24:43 crc kubenswrapper[4774]: E1121 15:24:37.546300 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80a0dac9-301c-4d86-9dcd-5400270b1f8b" containerName="mariadb-client-1" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.546323 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="80a0dac9-301c-4d86-9dcd-5400270b1f8b" containerName="mariadb-client-1" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.546635 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="80a0dac9-301c-4d86-9dcd-5400270b1f8b" containerName="mariadb-client-1" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.547523 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.561636 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.620672 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4880745c9ee8fe3ab151e29adfbda66d144d4b0ea60ea7f4a7afdf2572afadb" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.620762 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-1" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.714766 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6jtv\" (UniqueName: \"kubernetes.io/projected/06375980-5ffb-4850-8e54-cddaada694cc-kube-api-access-h6jtv\") pod \"mariadb-client-4-default\" (UID: \"06375980-5ffb-4850-8e54-cddaada694cc\") " pod="openstack/mariadb-client-4-default" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.815730 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6jtv\" (UniqueName: \"kubernetes.io/projected/06375980-5ffb-4850-8e54-cddaada694cc-kube-api-access-h6jtv\") pod \"mariadb-client-4-default\" (UID: \"06375980-5ffb-4850-8e54-cddaada694cc\") " pod="openstack/mariadb-client-4-default" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.841421 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6jtv\" (UniqueName: \"kubernetes.io/projected/06375980-5ffb-4850-8e54-cddaada694cc-kube-api-access-h6jtv\") pod \"mariadb-client-4-default\" (UID: \"06375980-5ffb-4850-8e54-cddaada694cc\") " pod="openstack/mariadb-client-4-default" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:37.880859 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Nov 21 15:24:43 crc kubenswrapper[4774]: I1121 15:24:38.106278 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80a0dac9-301c-4d86-9dcd-5400270b1f8b" path="/var/lib/kubelet/pods/80a0dac9-301c-4d86-9dcd-5400270b1f8b/volumes" Nov 21 15:24:44 crc kubenswrapper[4774]: I1121 15:24:44.120196 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-4-default"] Nov 21 15:24:44 crc kubenswrapper[4774]: I1121 15:24:44.699052 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"06375980-5ffb-4850-8e54-cddaada694cc","Type":"ContainerStarted","Data":"07894b185dc2b90ca1f1043b7eeea0be3a44d1ceb2e9312e0770770e42a3151b"} Nov 21 15:24:44 crc kubenswrapper[4774]: I1121 15:24:44.699122 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"06375980-5ffb-4850-8e54-cddaada694cc","Type":"ContainerStarted","Data":"7a98e5f8bf8127f8123222aa8c5eeae40029312fae3219b9d41c9a866877d9fc"} Nov 21 15:24:44 crc kubenswrapper[4774]: I1121 15:24:44.726298 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-4-default" podStartSLOduration=7.726249709 podStartE2EDuration="7.726249709s" podCreationTimestamp="2025-11-21 15:24:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:24:44.718652464 +0000 UTC m=+4875.370851733" watchObservedRunningTime="2025-11-21 15:24:44.726249709 +0000 UTC m=+4875.378448978" Nov 21 15:24:44 crc kubenswrapper[4774]: I1121 15:24:44.779395 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-4-default_06375980-5ffb-4850-8e54-cddaada694cc/mariadb-client-4-default/0.log" Nov 21 15:24:45 crc kubenswrapper[4774]: I1121 15:24:45.714628 4774 generic.go:334] "Generic (PLEG): container finished" podID="06375980-5ffb-4850-8e54-cddaada694cc" containerID="07894b185dc2b90ca1f1043b7eeea0be3a44d1ceb2e9312e0770770e42a3151b" exitCode=0 Nov 21 15:24:45 crc 
kubenswrapper[4774]: I1121 15:24:45.714674 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-4-default" event={"ID":"06375980-5ffb-4850-8e54-cddaada694cc","Type":"ContainerDied","Data":"07894b185dc2b90ca1f1043b7eeea0be3a44d1ceb2e9312e0770770e42a3151b"} Nov 21 15:24:47 crc kubenswrapper[4774]: I1121 15:24:47.046933 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Nov 21 15:24:47 crc kubenswrapper[4774]: I1121 15:24:47.088911 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-4-default"] Nov 21 15:24:47 crc kubenswrapper[4774]: I1121 15:24:47.093920 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-4-default"] Nov 21 15:24:47 crc kubenswrapper[4774]: I1121 15:24:47.171933 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6jtv\" (UniqueName: \"kubernetes.io/projected/06375980-5ffb-4850-8e54-cddaada694cc-kube-api-access-h6jtv\") pod \"06375980-5ffb-4850-8e54-cddaada694cc\" (UID: \"06375980-5ffb-4850-8e54-cddaada694cc\") " Nov 21 15:24:47 crc kubenswrapper[4774]: I1121 15:24:47.183150 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06375980-5ffb-4850-8e54-cddaada694cc-kube-api-access-h6jtv" (OuterVolumeSpecName: "kube-api-access-h6jtv") pod "06375980-5ffb-4850-8e54-cddaada694cc" (UID: "06375980-5ffb-4850-8e54-cddaada694cc"). InnerVolumeSpecName "kube-api-access-h6jtv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:24:47 crc kubenswrapper[4774]: I1121 15:24:47.273721 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6jtv\" (UniqueName: \"kubernetes.io/projected/06375980-5ffb-4850-8e54-cddaada694cc-kube-api-access-h6jtv\") on node \"crc\" DevicePath \"\"" Nov 21 15:24:47 crc kubenswrapper[4774]: I1121 15:24:47.732366 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a98e5f8bf8127f8123222aa8c5eeae40029312fae3219b9d41c9a866877d9fc" Nov 21 15:24:47 crc kubenswrapper[4774]: I1121 15:24:47.732576 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-4-default" Nov 21 15:24:48 crc kubenswrapper[4774]: I1121 15:24:48.110746 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06375980-5ffb-4850-8e54-cddaada694cc" path="/var/lib/kubelet/pods/06375980-5ffb-4850-8e54-cddaada694cc/volumes" Nov 21 15:24:51 crc kubenswrapper[4774]: I1121 15:24:51.137425 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-5-default"] Nov 21 15:24:51 crc kubenswrapper[4774]: E1121 15:24:51.138348 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06375980-5ffb-4850-8e54-cddaada694cc" containerName="mariadb-client-4-default" Nov 21 15:24:51 crc kubenswrapper[4774]: I1121 15:24:51.138364 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="06375980-5ffb-4850-8e54-cddaada694cc" containerName="mariadb-client-4-default" Nov 21 15:24:51 crc kubenswrapper[4774]: I1121 15:24:51.138543 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="06375980-5ffb-4850-8e54-cddaada694cc" containerName="mariadb-client-4-default" Nov 21 15:24:51 crc kubenswrapper[4774]: I1121 15:24:51.139199 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Nov 21 15:24:51 crc kubenswrapper[4774]: I1121 15:24:51.142577 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-hbfp7" Nov 21 15:24:51 crc kubenswrapper[4774]: I1121 15:24:51.153396 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Nov 21 15:24:51 crc kubenswrapper[4774]: I1121 15:24:51.252199 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxgd8\" (UniqueName: \"kubernetes.io/projected/78f99f70-f4fb-4ff6-abcc-7aa1d5044144-kube-api-access-pxgd8\") pod \"mariadb-client-5-default\" (UID: \"78f99f70-f4fb-4ff6-abcc-7aa1d5044144\") " pod="openstack/mariadb-client-5-default" Nov 21 15:24:51 crc kubenswrapper[4774]: I1121 15:24:51.355078 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxgd8\" (UniqueName: \"kubernetes.io/projected/78f99f70-f4fb-4ff6-abcc-7aa1d5044144-kube-api-access-pxgd8\") pod \"mariadb-client-5-default\" (UID: \"78f99f70-f4fb-4ff6-abcc-7aa1d5044144\") " pod="openstack/mariadb-client-5-default" Nov 21 15:24:51 crc kubenswrapper[4774]: I1121 15:24:51.373383 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxgd8\" (UniqueName: \"kubernetes.io/projected/78f99f70-f4fb-4ff6-abcc-7aa1d5044144-kube-api-access-pxgd8\") pod \"mariadb-client-5-default\" (UID: \"78f99f70-f4fb-4ff6-abcc-7aa1d5044144\") " pod="openstack/mariadb-client-5-default" Nov 21 15:24:51 crc kubenswrapper[4774]: I1121 15:24:51.463413 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-5-default" Nov 21 15:24:52 crc kubenswrapper[4774]: I1121 15:24:52.018051 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-5-default"] Nov 21 15:24:52 crc kubenswrapper[4774]: W1121 15:24:52.024960 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78f99f70_f4fb_4ff6_abcc_7aa1d5044144.slice/crio-ec7ac02118509cc26c577cd65a504d63cdaabe9a1f427a73424d9404d0060cf3 WatchSource:0}: Error finding container ec7ac02118509cc26c577cd65a504d63cdaabe9a1f427a73424d9404d0060cf3: Status 404 returned error can't find the container with id ec7ac02118509cc26c577cd65a504d63cdaabe9a1f427a73424d9404d0060cf3 Nov 21 15:24:52 crc kubenswrapper[4774]: I1121 15:24:52.776785 4774 generic.go:334] "Generic (PLEG): container finished" podID="78f99f70-f4fb-4ff6-abcc-7aa1d5044144" containerID="82e49591f76c39251c278bc199ada53449ccdd580d368d4716516ca63f1143a2" exitCode=0 Nov 21 15:24:52 crc kubenswrapper[4774]: I1121 15:24:52.776900 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"78f99f70-f4fb-4ff6-abcc-7aa1d5044144","Type":"ContainerDied","Data":"82e49591f76c39251c278bc199ada53449ccdd580d368d4716516ca63f1143a2"} Nov 21 15:24:52 crc kubenswrapper[4774]: I1121 15:24:52.777261 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-5-default" event={"ID":"78f99f70-f4fb-4ff6-abcc-7aa1d5044144","Type":"ContainerStarted","Data":"ec7ac02118509cc26c577cd65a504d63cdaabe9a1f427a73424d9404d0060cf3"} Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.185355 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.202180 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-5-default_78f99f70-f4fb-4ff6-abcc-7aa1d5044144/mariadb-client-5-default/0.log" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.223062 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-5-default"] Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.227694 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-5-default"] Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.308724 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxgd8\" (UniqueName: \"kubernetes.io/projected/78f99f70-f4fb-4ff6-abcc-7aa1d5044144-kube-api-access-pxgd8\") pod \"78f99f70-f4fb-4ff6-abcc-7aa1d5044144\" (UID: \"78f99f70-f4fb-4ff6-abcc-7aa1d5044144\") " Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.315603 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78f99f70-f4fb-4ff6-abcc-7aa1d5044144-kube-api-access-pxgd8" (OuterVolumeSpecName: "kube-api-access-pxgd8") pod "78f99f70-f4fb-4ff6-abcc-7aa1d5044144" (UID: "78f99f70-f4fb-4ff6-abcc-7aa1d5044144"). InnerVolumeSpecName "kube-api-access-pxgd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.347440 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-6-default"] Nov 21 15:24:54 crc kubenswrapper[4774]: E1121 15:24:54.347758 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78f99f70-f4fb-4ff6-abcc-7aa1d5044144" containerName="mariadb-client-5-default" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.347776 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="78f99f70-f4fb-4ff6-abcc-7aa1d5044144" containerName="mariadb-client-5-default" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.348058 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="78f99f70-f4fb-4ff6-abcc-7aa1d5044144" containerName="mariadb-client-5-default" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.348619 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.365414 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.410113 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxgd8\" (UniqueName: \"kubernetes.io/projected/78f99f70-f4fb-4ff6-abcc-7aa1d5044144-kube-api-access-pxgd8\") on node \"crc\" DevicePath \"\"" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.510832 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s89d5\" (UniqueName: \"kubernetes.io/projected/13e51ee3-50c9-4913-952b-0c1c89d4a461-kube-api-access-s89d5\") pod \"mariadb-client-6-default\" (UID: \"13e51ee3-50c9-4913-952b-0c1c89d4a461\") " pod="openstack/mariadb-client-6-default" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.611960 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s89d5\" (UniqueName: \"kubernetes.io/projected/13e51ee3-50c9-4913-952b-0c1c89d4a461-kube-api-access-s89d5\") pod \"mariadb-client-6-default\" (UID: \"13e51ee3-50c9-4913-952b-0c1c89d4a461\") " pod="openstack/mariadb-client-6-default" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.631568 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s89d5\" (UniqueName: \"kubernetes.io/projected/13e51ee3-50c9-4913-952b-0c1c89d4a461-kube-api-access-s89d5\") pod \"mariadb-client-6-default\" (UID: \"13e51ee3-50c9-4913-952b-0c1c89d4a461\") " pod="openstack/mariadb-client-6-default" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.666733 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-6-default" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.806615 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec7ac02118509cc26c577cd65a504d63cdaabe9a1f427a73424d9404d0060cf3" Nov 21 15:24:54 crc kubenswrapper[4774]: I1121 15:24:54.806965 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-5-default" Nov 21 15:24:55 crc kubenswrapper[4774]: I1121 15:24:55.158169 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-6-default"] Nov 21 15:24:55 crc kubenswrapper[4774]: W1121 15:24:55.163110 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13e51ee3_50c9_4913_952b_0c1c89d4a461.slice/crio-882f15826b960ffb959daf01b6ef3887e73dd0b0aca7263774d5ffaa5538a478 WatchSource:0}: Error finding container 882f15826b960ffb959daf01b6ef3887e73dd0b0aca7263774d5ffaa5538a478: Status 404 returned error can't find the container with id 882f15826b960ffb959daf01b6ef3887e73dd0b0aca7263774d5ffaa5538a478 Nov 21 15:24:55 crc kubenswrapper[4774]: I1121 15:24:55.820486 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"13e51ee3-50c9-4913-952b-0c1c89d4a461","Type":"ContainerStarted","Data":"747aa1fc52e089ff15528fd34c83c1cef5c8731c67258037e1d15a9db1d8d8ca"} Nov 21 15:24:55 crc kubenswrapper[4774]: I1121 15:24:55.822055 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"13e51ee3-50c9-4913-952b-0c1c89d4a461","Type":"ContainerStarted","Data":"882f15826b960ffb959daf01b6ef3887e73dd0b0aca7263774d5ffaa5538a478"} Nov 21 15:24:55 crc kubenswrapper[4774]: I1121 15:24:55.836900 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-client-6-default" podStartSLOduration=1.836870693 podStartE2EDuration="1.836870693s" podCreationTimestamp="2025-11-21 15:24:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:24:55.834594659 +0000 UTC m=+4886.486794008" watchObservedRunningTime="2025-11-21 15:24:55.836870693 +0000 UTC m=+4886.489069952" Nov 21 15:24:55 crc kubenswrapper[4774]: I1121 15:24:55.922314 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-6-default_13e51ee3-50c9-4913-952b-0c1c89d4a461/mariadb-client-6-default/0.log" Nov 21 15:24:56 crc kubenswrapper[4774]: I1121 15:24:56.103471 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78f99f70-f4fb-4ff6-abcc-7aa1d5044144" path="/var/lib/kubelet/pods/78f99f70-f4fb-4ff6-abcc-7aa1d5044144/volumes" Nov 21 15:24:56 crc kubenswrapper[4774]: I1121 15:24:56.833953 4774 generic.go:334] "Generic (PLEG): container finished" podID="13e51ee3-50c9-4913-952b-0c1c89d4a461" containerID="747aa1fc52e089ff15528fd34c83c1cef5c8731c67258037e1d15a9db1d8d8ca" exitCode=1 Nov 21 15:24:56 crc kubenswrapper[4774]: I1121 15:24:56.834029 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-6-default" event={"ID":"13e51ee3-50c9-4913-952b-0c1c89d4a461","Type":"ContainerDied","Data":"747aa1fc52e089ff15528fd34c83c1cef5c8731c67258037e1d15a9db1d8d8ca"} Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.172713 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.211022 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-6-default"] Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.212506 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-6-default"] Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.271936 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s89d5\" (UniqueName: \"kubernetes.io/projected/13e51ee3-50c9-4913-952b-0c1c89d4a461-kube-api-access-s89d5\") pod \"13e51ee3-50c9-4913-952b-0c1c89d4a461\" (UID: \"13e51ee3-50c9-4913-952b-0c1c89d4a461\") " Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.278186 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13e51ee3-50c9-4913-952b-0c1c89d4a461-kube-api-access-s89d5" (OuterVolumeSpecName: "kube-api-access-s89d5") pod "13e51ee3-50c9-4913-952b-0c1c89d4a461" (UID: "13e51ee3-50c9-4913-952b-0c1c89d4a461"). InnerVolumeSpecName "kube-api-access-s89d5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.330480 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-7-default"] Nov 21 15:24:58 crc kubenswrapper[4774]: E1121 15:24:58.331030 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13e51ee3-50c9-4913-952b-0c1c89d4a461" containerName="mariadb-client-6-default" Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.331094 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="13e51ee3-50c9-4913-952b-0c1c89d4a461" containerName="mariadb-client-6-default" Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.331321 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="13e51ee3-50c9-4913-952b-0c1c89d4a461" containerName="mariadb-client-6-default" Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.332000 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.338501 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.373237 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s89d5\" (UniqueName: \"kubernetes.io/projected/13e51ee3-50c9-4913-952b-0c1c89d4a461-kube-api-access-s89d5\") on node \"crc\" DevicePath \"\"" Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.474451 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njmph\" (UniqueName: \"kubernetes.io/projected/a836b73f-1ad6-4e07-b9e8-1aa688a97cc5-kube-api-access-njmph\") pod \"mariadb-client-7-default\" (UID: \"a836b73f-1ad6-4e07-b9e8-1aa688a97cc5\") " pod="openstack/mariadb-client-7-default" Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.576617 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njmph\" (UniqueName: \"kubernetes.io/projected/a836b73f-1ad6-4e07-b9e8-1aa688a97cc5-kube-api-access-njmph\") pod \"mariadb-client-7-default\" (UID: \"a836b73f-1ad6-4e07-b9e8-1aa688a97cc5\") " pod="openstack/mariadb-client-7-default" Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.595803 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njmph\" (UniqueName: \"kubernetes.io/projected/a836b73f-1ad6-4e07-b9e8-1aa688a97cc5-kube-api-access-njmph\") pod \"mariadb-client-7-default\" (UID: \"a836b73f-1ad6-4e07-b9e8-1aa688a97cc5\") " pod="openstack/mariadb-client-7-default" Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.649806 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.854642 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="882f15826b960ffb959daf01b6ef3887e73dd0b0aca7263774d5ffaa5538a478" Nov 21 15:24:58 crc kubenswrapper[4774]: I1121 15:24:58.854709 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-6-default" Nov 21 15:24:59 crc kubenswrapper[4774]: I1121 15:24:59.208086 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-7-default"] Nov 21 15:24:59 crc kubenswrapper[4774]: I1121 15:24:59.601051 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:24:59 crc kubenswrapper[4774]: I1121 15:24:59.601145 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:24:59 crc kubenswrapper[4774]: I1121 15:24:59.867763 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"a836b73f-1ad6-4e07-b9e8-1aa688a97cc5","Type":"ContainerStarted","Data":"aa546c2fcb6642170d8e8c924e8c4cc70c420347973d45bd3a1626c9828bc1c3"} Nov 21 15:25:00 crc kubenswrapper[4774]: I1121 15:25:00.103918 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13e51ee3-50c9-4913-952b-0c1c89d4a461" path="/var/lib/kubelet/pods/13e51ee3-50c9-4913-952b-0c1c89d4a461/volumes" Nov 21 15:25:00 crc kubenswrapper[4774]: I1121 15:25:00.878413 4774 generic.go:334] "Generic (PLEG): container finished" podID="a836b73f-1ad6-4e07-b9e8-1aa688a97cc5" containerID="52b4f9073277db773ac58fabc7062ef8908880d5f7a9afa0e8e05ecd0fe1ec6a" exitCode=0 Nov 21 15:25:00 crc kubenswrapper[4774]: I1121 15:25:00.879237 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-7-default" event={"ID":"a836b73f-1ad6-4e07-b9e8-1aa688a97cc5","Type":"ContainerDied","Data":"52b4f9073277db773ac58fabc7062ef8908880d5f7a9afa0e8e05ecd0fe1ec6a"} Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.244732 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.265951 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-7-default_a836b73f-1ad6-4e07-b9e8-1aa688a97cc5/mariadb-client-7-default/0.log" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.290775 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-7-default"] Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.296470 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-7-default"] Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.331848 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njmph\" (UniqueName: \"kubernetes.io/projected/a836b73f-1ad6-4e07-b9e8-1aa688a97cc5-kube-api-access-njmph\") pod \"a836b73f-1ad6-4e07-b9e8-1aa688a97cc5\" (UID: \"a836b73f-1ad6-4e07-b9e8-1aa688a97cc5\") " Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.337563 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a836b73f-1ad6-4e07-b9e8-1aa688a97cc5-kube-api-access-njmph" (OuterVolumeSpecName: "kube-api-access-njmph") pod "a836b73f-1ad6-4e07-b9e8-1aa688a97cc5" (UID: "a836b73f-1ad6-4e07-b9e8-1aa688a97cc5"). InnerVolumeSpecName "kube-api-access-njmph". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.412656 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client-2"] Nov 21 15:25:02 crc kubenswrapper[4774]: E1121 15:25:02.415177 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a836b73f-1ad6-4e07-b9e8-1aa688a97cc5" containerName="mariadb-client-7-default" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.415216 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a836b73f-1ad6-4e07-b9e8-1aa688a97cc5" containerName="mariadb-client-7-default" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.415529 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a836b73f-1ad6-4e07-b9e8-1aa688a97cc5" containerName="mariadb-client-7-default" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.416115 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-2" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.419530 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.433091 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njmph\" (UniqueName: \"kubernetes.io/projected/a836b73f-1ad6-4e07-b9e8-1aa688a97cc5-kube-api-access-njmph\") on node \"crc\" DevicePath \"\"" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.534173 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25gzl\" (UniqueName: \"kubernetes.io/projected/ac8e4234-c478-4f33-8911-d03891c7df33-kube-api-access-25gzl\") pod \"mariadb-client-2\" (UID: \"ac8e4234-c478-4f33-8911-d03891c7df33\") " pod="openstack/mariadb-client-2" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.635565 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25gzl\" (UniqueName: \"kubernetes.io/projected/ac8e4234-c478-4f33-8911-d03891c7df33-kube-api-access-25gzl\") pod \"mariadb-client-2\" (UID: \"ac8e4234-c478-4f33-8911-d03891c7df33\") " pod="openstack/mariadb-client-2" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.653914 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25gzl\" (UniqueName: \"kubernetes.io/projected/ac8e4234-c478-4f33-8911-d03891c7df33-kube-api-access-25gzl\") pod \"mariadb-client-2\" (UID: \"ac8e4234-c478-4f33-8911-d03891c7df33\") " pod="openstack/mariadb-client-2" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.736748 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.900184 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa546c2fcb6642170d8e8c924e8c4cc70c420347973d45bd3a1626c9828bc1c3" Nov 21 15:25:02 crc kubenswrapper[4774]: I1121 15:25:02.900254 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-client-7-default" Nov 21 15:25:03 crc kubenswrapper[4774]: I1121 15:25:03.005447 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client-2"] Nov 21 15:25:03 crc kubenswrapper[4774]: W1121 15:25:03.010346 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac8e4234_c478_4f33_8911_d03891c7df33.slice/crio-d7b5b3a7ecbee87cbc38f0ae399e2a94d74cd5e229fcfedeebaffe398e20e6b0 WatchSource:0}: Error finding container d7b5b3a7ecbee87cbc38f0ae399e2a94d74cd5e229fcfedeebaffe398e20e6b0: Status 404 returned error can't find the container with id d7b5b3a7ecbee87cbc38f0ae399e2a94d74cd5e229fcfedeebaffe398e20e6b0 Nov 21 15:25:03 crc kubenswrapper[4774]: I1121 15:25:03.914534 4774 generic.go:334] "Generic (PLEG): container finished" podID="ac8e4234-c478-4f33-8911-d03891c7df33" containerID="932d1edfe5564c75a077105a3aa624b7ff8cf75d2d48584b5168c91404dbed69" exitCode=0 Nov 21 15:25:03 crc kubenswrapper[4774]: I1121 15:25:03.914863 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"ac8e4234-c478-4f33-8911-d03891c7df33","Type":"ContainerDied","Data":"932d1edfe5564c75a077105a3aa624b7ff8cf75d2d48584b5168c91404dbed69"} Nov 21 15:25:03 crc kubenswrapper[4774]: I1121 15:25:03.914904 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client-2" event={"ID":"ac8e4234-c478-4f33-8911-d03891c7df33","Type":"ContainerStarted","Data":"d7b5b3a7ecbee87cbc38f0ae399e2a94d74cd5e229fcfedeebaffe398e20e6b0"} Nov 21 15:25:04 crc kubenswrapper[4774]: I1121 15:25:04.112260 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a836b73f-1ad6-4e07-b9e8-1aa688a97cc5" path="/var/lib/kubelet/pods/a836b73f-1ad6-4e07-b9e8-1aa688a97cc5/volumes" Nov 21 15:25:05 crc kubenswrapper[4774]: I1121 15:25:05.290841 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Nov 21 15:25:05 crc kubenswrapper[4774]: I1121 15:25:05.312681 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client-2_ac8e4234-c478-4f33-8911-d03891c7df33/mariadb-client-2/0.log" Nov 21 15:25:05 crc kubenswrapper[4774]: I1121 15:25:05.337708 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client-2"] Nov 21 15:25:05 crc kubenswrapper[4774]: I1121 15:25:05.342567 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client-2"] Nov 21 15:25:05 crc kubenswrapper[4774]: I1121 15:25:05.374590 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25gzl\" (UniqueName: \"kubernetes.io/projected/ac8e4234-c478-4f33-8911-d03891c7df33-kube-api-access-25gzl\") pod \"ac8e4234-c478-4f33-8911-d03891c7df33\" (UID: \"ac8e4234-c478-4f33-8911-d03891c7df33\") " Nov 21 15:25:05 crc kubenswrapper[4774]: I1121 15:25:05.382130 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac8e4234-c478-4f33-8911-d03891c7df33-kube-api-access-25gzl" (OuterVolumeSpecName: "kube-api-access-25gzl") pod "ac8e4234-c478-4f33-8911-d03891c7df33" (UID: "ac8e4234-c478-4f33-8911-d03891c7df33"). InnerVolumeSpecName "kube-api-access-25gzl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:25:05 crc kubenswrapper[4774]: I1121 15:25:05.476505 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25gzl\" (UniqueName: \"kubernetes.io/projected/ac8e4234-c478-4f33-8911-d03891c7df33-kube-api-access-25gzl\") on node \"crc\" DevicePath \"\"" Nov 21 15:25:05 crc kubenswrapper[4774]: I1121 15:25:05.934884 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7b5b3a7ecbee87cbc38f0ae399e2a94d74cd5e229fcfedeebaffe398e20e6b0" Nov 21 15:25:05 crc kubenswrapper[4774]: I1121 15:25:05.934967 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client-2" Nov 21 15:25:06 crc kubenswrapper[4774]: I1121 15:25:06.103636 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac8e4234-c478-4f33-8911-d03891c7df33" path="/var/lib/kubelet/pods/ac8e4234-c478-4f33-8911-d03891c7df33/volumes" Nov 21 15:25:29 crc kubenswrapper[4774]: I1121 15:25:29.600789 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:25:29 crc kubenswrapper[4774]: I1121 15:25:29.601607 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:25:29 crc kubenswrapper[4774]: I1121 15:25:29.601674 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 15:25:29 crc kubenswrapper[4774]: I1121 15:25:29.602515 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"55e5bb7215c0e7b96cb956f72f36e0242e48205521d6294be282760e5b7b20ab"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 15:25:29 crc kubenswrapper[4774]: I1121 15:25:29.602641 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://55e5bb7215c0e7b96cb956f72f36e0242e48205521d6294be282760e5b7b20ab" gracePeriod=600 Nov 21 15:25:30 crc kubenswrapper[4774]: I1121 15:25:30.142866 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="55e5bb7215c0e7b96cb956f72f36e0242e48205521d6294be282760e5b7b20ab" exitCode=0 Nov 21 15:25:30 crc kubenswrapper[4774]: I1121 15:25:30.142946 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"55e5bb7215c0e7b96cb956f72f36e0242e48205521d6294be282760e5b7b20ab"} Nov 21 15:25:30 crc kubenswrapper[4774]: I1121 15:25:30.143266 4774 scope.go:117] "RemoveContainer" containerID="77b7b2229d5efd37bdbc036db9565daa9d00c1937a7c5d5772c02b8ca4819bcd" Nov 21 15:25:31 crc 
kubenswrapper[4774]: I1121 15:25:31.152620 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc"} Nov 21 15:25:39 crc kubenswrapper[4774]: I1121 15:25:39.793122 4774 scope.go:117] "RemoveContainer" containerID="1ce1c59098c450c11143dc9e0076e0b56e688f99ed95cd349b890af6022ac765" Nov 21 15:27:59 crc kubenswrapper[4774]: I1121 15:27:59.601246 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:27:59 crc kubenswrapper[4774]: I1121 15:27:59.602626 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:28:29 crc kubenswrapper[4774]: I1121 15:28:29.601573 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:28:29 crc kubenswrapper[4774]: I1121 15:28:29.602348 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.070382 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fbjgj"] Nov 21 15:28:34 crc kubenswrapper[4774]: E1121 15:28:34.071482 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac8e4234-c478-4f33-8911-d03891c7df33" containerName="mariadb-client-2" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.071503 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac8e4234-c478-4f33-8911-d03891c7df33" containerName="mariadb-client-2" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.071748 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac8e4234-c478-4f33-8911-d03891c7df33" containerName="mariadb-client-2" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.073624 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.086482 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fbjgj"] Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.182068 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9754f7aa-8618-4e4a-963f-20ba214a64af-utilities\") pod \"certified-operators-fbjgj\" (UID: \"9754f7aa-8618-4e4a-963f-20ba214a64af\") " pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.182105 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmxkg\" (UniqueName: \"kubernetes.io/projected/9754f7aa-8618-4e4a-963f-20ba214a64af-kube-api-access-lmxkg\") pod \"certified-operators-fbjgj\" (UID: \"9754f7aa-8618-4e4a-963f-20ba214a64af\") " pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.182164 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9754f7aa-8618-4e4a-963f-20ba214a64af-catalog-content\") pod \"certified-operators-fbjgj\" (UID: \"9754f7aa-8618-4e4a-963f-20ba214a64af\") " pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.284327 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9754f7aa-8618-4e4a-963f-20ba214a64af-catalog-content\") pod \"certified-operators-fbjgj\" (UID: \"9754f7aa-8618-4e4a-963f-20ba214a64af\") " pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.284478 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9754f7aa-8618-4e4a-963f-20ba214a64af-utilities\") pod \"certified-operators-fbjgj\" (UID: \"9754f7aa-8618-4e4a-963f-20ba214a64af\") " pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.284507 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmxkg\" (UniqueName: \"kubernetes.io/projected/9754f7aa-8618-4e4a-963f-20ba214a64af-kube-api-access-lmxkg\") pod \"certified-operators-fbjgj\" (UID: \"9754f7aa-8618-4e4a-963f-20ba214a64af\") " pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.285011 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9754f7aa-8618-4e4a-963f-20ba214a64af-catalog-content\") pod \"certified-operators-fbjgj\" (UID: \"9754f7aa-8618-4e4a-963f-20ba214a64af\") " pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.285011 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9754f7aa-8618-4e4a-963f-20ba214a64af-utilities\") pod \"certified-operators-fbjgj\" (UID: \"9754f7aa-8618-4e4a-963f-20ba214a64af\") " pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.309345 4774 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-lmxkg\" (UniqueName: \"kubernetes.io/projected/9754f7aa-8618-4e4a-963f-20ba214a64af-kube-api-access-lmxkg\") pod \"certified-operators-fbjgj\" (UID: \"9754f7aa-8618-4e4a-963f-20ba214a64af\") " pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.396528 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:34 crc kubenswrapper[4774]: I1121 15:28:34.882013 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fbjgj"] Nov 21 15:28:35 crc kubenswrapper[4774]: I1121 15:28:35.697958 4774 generic.go:334] "Generic (PLEG): container finished" podID="9754f7aa-8618-4e4a-963f-20ba214a64af" containerID="5bc414c99d6f01f6533cafd26527b386a200da1acc40d44627ce5d775bbbd7fe" exitCode=0 Nov 21 15:28:35 crc kubenswrapper[4774]: I1121 15:28:35.698020 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbjgj" event={"ID":"9754f7aa-8618-4e4a-963f-20ba214a64af","Type":"ContainerDied","Data":"5bc414c99d6f01f6533cafd26527b386a200da1acc40d44627ce5d775bbbd7fe"} Nov 21 15:28:35 crc kubenswrapper[4774]: I1121 15:28:35.698280 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbjgj" event={"ID":"9754f7aa-8618-4e4a-963f-20ba214a64af","Type":"ContainerStarted","Data":"f099a2a5b4411d332c34bbdce212338b9b5828d96e5cd2169f4c38d3d734f4fa"} Nov 21 15:28:36 crc kubenswrapper[4774]: I1121 15:28:36.708680 4774 generic.go:334] "Generic (PLEG): container finished" podID="9754f7aa-8618-4e4a-963f-20ba214a64af" containerID="82fbd9d640b1b51143e9bf770a5505d73bddefb04887e04faeaf60c4ad801f14" exitCode=0 Nov 21 15:28:36 crc kubenswrapper[4774]: I1121 15:28:36.708754 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbjgj" event={"ID":"9754f7aa-8618-4e4a-963f-20ba214a64af","Type":"ContainerDied","Data":"82fbd9d640b1b51143e9bf770a5505d73bddefb04887e04faeaf60c4ad801f14"} Nov 21 15:28:37 crc kubenswrapper[4774]: I1121 15:28:37.718552 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbjgj" event={"ID":"9754f7aa-8618-4e4a-963f-20ba214a64af","Type":"ContainerStarted","Data":"2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84"} Nov 21 15:28:37 crc kubenswrapper[4774]: I1121 15:28:37.741384 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fbjgj" podStartSLOduration=2.33637993 podStartE2EDuration="3.741364194s" podCreationTimestamp="2025-11-21 15:28:34 +0000 UTC" firstStartedPulling="2025-11-21 15:28:35.699419836 +0000 UTC m=+5106.351619115" lastFinishedPulling="2025-11-21 15:28:37.10440411 +0000 UTC m=+5107.756603379" observedRunningTime="2025-11-21 15:28:37.736392943 +0000 UTC m=+5108.388592202" watchObservedRunningTime="2025-11-21 15:28:37.741364194 +0000 UTC m=+5108.393563463" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.291189 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dr6md"] Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.294723 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dr6md" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.315934 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dr6md"] Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.397360 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.397453 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.439176 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-utilities\") pod \"redhat-marketplace-dr6md\" (UID: \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\") " pod="openshift-marketplace/redhat-marketplace-dr6md" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.439338 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-catalog-content\") pod \"redhat-marketplace-dr6md\" (UID: \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\") " pod="openshift-marketplace/redhat-marketplace-dr6md" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.439459 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq4qx\" (UniqueName: \"kubernetes.io/projected/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-kube-api-access-sq4qx\") pod \"redhat-marketplace-dr6md\" (UID: \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\") " pod="openshift-marketplace/redhat-marketplace-dr6md" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.444288 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.541018 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq4qx\" (UniqueName: \"kubernetes.io/projected/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-kube-api-access-sq4qx\") pod \"redhat-marketplace-dr6md\" (UID: \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\") " pod="openshift-marketplace/redhat-marketplace-dr6md" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.541098 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-utilities\") pod \"redhat-marketplace-dr6md\" (UID: \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\") " pod="openshift-marketplace/redhat-marketplace-dr6md" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.541174 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-catalog-content\") pod \"redhat-marketplace-dr6md\" (UID: \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\") " pod="openshift-marketplace/redhat-marketplace-dr6md" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.541661 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-catalog-content\") pod \"redhat-marketplace-dr6md\" (UID: 
\"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\") " pod="openshift-marketplace/redhat-marketplace-dr6md" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.542172 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-utilities\") pod \"redhat-marketplace-dr6md\" (UID: \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\") " pod="openshift-marketplace/redhat-marketplace-dr6md" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.565689 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq4qx\" (UniqueName: \"kubernetes.io/projected/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-kube-api-access-sq4qx\") pod \"redhat-marketplace-dr6md\" (UID: \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\") " pod="openshift-marketplace/redhat-marketplace-dr6md" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.623664 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dr6md" Nov 21 15:28:44 crc kubenswrapper[4774]: I1121 15:28:44.823302 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:45 crc kubenswrapper[4774]: I1121 15:28:45.097189 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dr6md"] Nov 21 15:28:45 crc kubenswrapper[4774]: I1121 15:28:45.787400 4774 generic.go:334] "Generic (PLEG): container finished" podID="d9a584b0-2009-4784-9b80-dc0b0c27b0e5" containerID="b1c0116a41ec4b01e57ef8aecbfd19553735eb66081b6d3dc127c76680341bba" exitCode=0 Nov 21 15:28:45 crc kubenswrapper[4774]: I1121 15:28:45.787466 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr6md" event={"ID":"d9a584b0-2009-4784-9b80-dc0b0c27b0e5","Type":"ContainerDied","Data":"b1c0116a41ec4b01e57ef8aecbfd19553735eb66081b6d3dc127c76680341bba"} Nov 21 15:28:45 crc kubenswrapper[4774]: I1121 15:28:45.789013 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr6md" event={"ID":"d9a584b0-2009-4784-9b80-dc0b0c27b0e5","Type":"ContainerStarted","Data":"dc20b23c04de75c008cc4448eb9361f593965b68b754ec2fef5c790ca0e2e03e"} Nov 21 15:28:46 crc kubenswrapper[4774]: I1121 15:28:46.692720 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fbjgj"] Nov 21 15:28:46 crc kubenswrapper[4774]: I1121 15:28:46.798796 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr6md" event={"ID":"d9a584b0-2009-4784-9b80-dc0b0c27b0e5","Type":"ContainerStarted","Data":"eb4bd12184e195db4d4194abff92f7b415b1a4b6a5cfe8c199be6e5eb8fdff6e"} Nov 21 15:28:47 crc kubenswrapper[4774]: I1121 15:28:47.809723 4774 generic.go:334] "Generic (PLEG): container finished" podID="d9a584b0-2009-4784-9b80-dc0b0c27b0e5" containerID="eb4bd12184e195db4d4194abff92f7b415b1a4b6a5cfe8c199be6e5eb8fdff6e" exitCode=0 Nov 21 15:28:47 crc kubenswrapper[4774]: I1121 15:28:47.809791 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr6md" event={"ID":"d9a584b0-2009-4784-9b80-dc0b0c27b0e5","Type":"ContainerDied","Data":"eb4bd12184e195db4d4194abff92f7b415b1a4b6a5cfe8c199be6e5eb8fdff6e"} Nov 21 15:28:47 crc kubenswrapper[4774]: I1121 15:28:47.809993 4774 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/certified-operators-fbjgj" podUID="9754f7aa-8618-4e4a-963f-20ba214a64af" containerName="registry-server" containerID="cri-o://2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84" gracePeriod=2 Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.212508 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.312180 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9754f7aa-8618-4e4a-963f-20ba214a64af-utilities\") pod \"9754f7aa-8618-4e4a-963f-20ba214a64af\" (UID: \"9754f7aa-8618-4e4a-963f-20ba214a64af\") " Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.312442 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9754f7aa-8618-4e4a-963f-20ba214a64af-catalog-content\") pod \"9754f7aa-8618-4e4a-963f-20ba214a64af\" (UID: \"9754f7aa-8618-4e4a-963f-20ba214a64af\") " Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.312552 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmxkg\" (UniqueName: \"kubernetes.io/projected/9754f7aa-8618-4e4a-963f-20ba214a64af-kube-api-access-lmxkg\") pod \"9754f7aa-8618-4e4a-963f-20ba214a64af\" (UID: \"9754f7aa-8618-4e4a-963f-20ba214a64af\") " Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.313263 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9754f7aa-8618-4e4a-963f-20ba214a64af-utilities" (OuterVolumeSpecName: "utilities") pod "9754f7aa-8618-4e4a-963f-20ba214a64af" (UID: "9754f7aa-8618-4e4a-963f-20ba214a64af"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.314072 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9754f7aa-8618-4e4a-963f-20ba214a64af-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.317580 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9754f7aa-8618-4e4a-963f-20ba214a64af-kube-api-access-lmxkg" (OuterVolumeSpecName: "kube-api-access-lmxkg") pod "9754f7aa-8618-4e4a-963f-20ba214a64af" (UID: "9754f7aa-8618-4e4a-963f-20ba214a64af"). InnerVolumeSpecName "kube-api-access-lmxkg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.364632 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9754f7aa-8618-4e4a-963f-20ba214a64af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9754f7aa-8618-4e4a-963f-20ba214a64af" (UID: "9754f7aa-8618-4e4a-963f-20ba214a64af"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.416624 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9754f7aa-8618-4e4a-963f-20ba214a64af-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.417152 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmxkg\" (UniqueName: \"kubernetes.io/projected/9754f7aa-8618-4e4a-963f-20ba214a64af-kube-api-access-lmxkg\") on node \"crc\" DevicePath \"\"" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.819083 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr6md" event={"ID":"d9a584b0-2009-4784-9b80-dc0b0c27b0e5","Type":"ContainerStarted","Data":"2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a"} Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.820679 4774 generic.go:334] "Generic (PLEG): container finished" podID="9754f7aa-8618-4e4a-963f-20ba214a64af" containerID="2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84" exitCode=0 Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.820705 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbjgj" event={"ID":"9754f7aa-8618-4e4a-963f-20ba214a64af","Type":"ContainerDied","Data":"2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84"} Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.820833 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fbjgj" event={"ID":"9754f7aa-8618-4e4a-963f-20ba214a64af","Type":"ContainerDied","Data":"f099a2a5b4411d332c34bbdce212338b9b5828d96e5cd2169f4c38d3d734f4fa"} Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.820761 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fbjgj" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.820863 4774 scope.go:117] "RemoveContainer" containerID="2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.837876 4774 scope.go:117] "RemoveContainer" containerID="82fbd9d640b1b51143e9bf770a5505d73bddefb04887e04faeaf60c4ad801f14" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.852374 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dr6md" podStartSLOduration=2.337707022 podStartE2EDuration="4.852334401s" podCreationTimestamp="2025-11-21 15:28:44 +0000 UTC" firstStartedPulling="2025-11-21 15:28:45.789316175 +0000 UTC m=+5116.441515434" lastFinishedPulling="2025-11-21 15:28:48.303943554 +0000 UTC m=+5118.956142813" observedRunningTime="2025-11-21 15:28:48.836565614 +0000 UTC m=+5119.488764873" watchObservedRunningTime="2025-11-21 15:28:48.852334401 +0000 UTC m=+5119.504533660" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.857447 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fbjgj"] Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.863159 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fbjgj"] Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.872790 4774 scope.go:117] "RemoveContainer" containerID="5bc414c99d6f01f6533cafd26527b386a200da1acc40d44627ce5d775bbbd7fe" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.887436 4774 scope.go:117] "RemoveContainer" containerID="2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84" Nov 21 15:28:48 crc kubenswrapper[4774]: E1121 15:28:48.887894 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84\": container with ID starting with 2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84 not found: ID does not exist" containerID="2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.887958 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84"} err="failed to get container status \"2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84\": rpc error: code = NotFound desc = could not find container \"2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84\": container with ID starting with 2b3df4d04f5b7915438d94eff8fc5e8f8164277f289a1ad4dd11d0587e296a84 not found: ID does not exist" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.887993 4774 scope.go:117] "RemoveContainer" containerID="82fbd9d640b1b51143e9bf770a5505d73bddefb04887e04faeaf60c4ad801f14" Nov 21 15:28:48 crc kubenswrapper[4774]: E1121 15:28:48.888321 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82fbd9d640b1b51143e9bf770a5505d73bddefb04887e04faeaf60c4ad801f14\": container with ID starting with 82fbd9d640b1b51143e9bf770a5505d73bddefb04887e04faeaf60c4ad801f14 not found: ID does not exist" containerID="82fbd9d640b1b51143e9bf770a5505d73bddefb04887e04faeaf60c4ad801f14" Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.888361 4774 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82fbd9d640b1b51143e9bf770a5505d73bddefb04887e04faeaf60c4ad801f14"} err="failed to get container status \"82fbd9d640b1b51143e9bf770a5505d73bddefb04887e04faeaf60c4ad801f14\": rpc error: code = NotFound desc = could not find container \"82fbd9d640b1b51143e9bf770a5505d73bddefb04887e04faeaf60c4ad801f14\": container with ID starting with 82fbd9d640b1b51143e9bf770a5505d73bddefb04887e04faeaf60c4ad801f14 not found: ID does not exist"
Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.888382 4774 scope.go:117] "RemoveContainer" containerID="5bc414c99d6f01f6533cafd26527b386a200da1acc40d44627ce5d775bbbd7fe"
Nov 21 15:28:48 crc kubenswrapper[4774]: E1121 15:28:48.888651 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bc414c99d6f01f6533cafd26527b386a200da1acc40d44627ce5d775bbbd7fe\": container with ID starting with 5bc414c99d6f01f6533cafd26527b386a200da1acc40d44627ce5d775bbbd7fe not found: ID does not exist" containerID="5bc414c99d6f01f6533cafd26527b386a200da1acc40d44627ce5d775bbbd7fe"
Nov 21 15:28:48 crc kubenswrapper[4774]: I1121 15:28:48.888673 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bc414c99d6f01f6533cafd26527b386a200da1acc40d44627ce5d775bbbd7fe"} err="failed to get container status \"5bc414c99d6f01f6533cafd26527b386a200da1acc40d44627ce5d775bbbd7fe\": rpc error: code = NotFound desc = could not find container \"5bc414c99d6f01f6533cafd26527b386a200da1acc40d44627ce5d775bbbd7fe\": container with ID starting with 5bc414c99d6f01f6533cafd26527b386a200da1acc40d44627ce5d775bbbd7fe not found: ID does not exist"
Nov 21 15:28:50 crc kubenswrapper[4774]: I1121 15:28:50.107095 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9754f7aa-8618-4e4a-963f-20ba214a64af" path="/var/lib/kubelet/pods/9754f7aa-8618-4e4a-963f-20ba214a64af/volumes"
Nov 21 15:28:54 crc kubenswrapper[4774]: I1121 15:28:54.624399 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dr6md"
Nov 21 15:28:54 crc kubenswrapper[4774]: I1121 15:28:54.624702 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dr6md"
Nov 21 15:28:54 crc kubenswrapper[4774]: I1121 15:28:54.708181 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dr6md"
Nov 21 15:28:54 crc kubenswrapper[4774]: I1121 15:28:54.919343 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dr6md"
Nov 21 15:28:54 crc kubenswrapper[4774]: I1121 15:28:54.966063 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dr6md"]
Nov 21 15:28:56 crc kubenswrapper[4774]: I1121 15:28:56.890920 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dr6md" podUID="d9a584b0-2009-4784-9b80-dc0b0c27b0e5" containerName="registry-server" containerID="cri-o://2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a" gracePeriod=2
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.315332 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dr6md"
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.458445 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-catalog-content\") pod \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\" (UID: \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\") "
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.458566 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-utilities\") pod \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\" (UID: \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\") "
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.458705 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sq4qx\" (UniqueName: \"kubernetes.io/projected/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-kube-api-access-sq4qx\") pod \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\" (UID: \"d9a584b0-2009-4784-9b80-dc0b0c27b0e5\") "
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.459514 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-utilities" (OuterVolumeSpecName: "utilities") pod "d9a584b0-2009-4784-9b80-dc0b0c27b0e5" (UID: "d9a584b0-2009-4784-9b80-dc0b0c27b0e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.464405 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-kube-api-access-sq4qx" (OuterVolumeSpecName: "kube-api-access-sq4qx") pod "d9a584b0-2009-4784-9b80-dc0b0c27b0e5" (UID: "d9a584b0-2009-4784-9b80-dc0b0c27b0e5"). InnerVolumeSpecName "kube-api-access-sq4qx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.483925 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9a584b0-2009-4784-9b80-dc0b0c27b0e5" (UID: "d9a584b0-2009-4784-9b80-dc0b0c27b0e5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.561391 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sq4qx\" (UniqueName: \"kubernetes.io/projected/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-kube-api-access-sq4qx\") on node \"crc\" DevicePath \"\""
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.561440 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.561450 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9a584b0-2009-4784-9b80-dc0b0c27b0e5-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.903195 4774 generic.go:334] "Generic (PLEG): container finished" podID="d9a584b0-2009-4784-9b80-dc0b0c27b0e5" containerID="2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a" exitCode=0
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.903268 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr6md" event={"ID":"d9a584b0-2009-4784-9b80-dc0b0c27b0e5","Type":"ContainerDied","Data":"2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a"}
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.903295 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dr6md"
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.903324 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dr6md" event={"ID":"d9a584b0-2009-4784-9b80-dc0b0c27b0e5","Type":"ContainerDied","Data":"dc20b23c04de75c008cc4448eb9361f593965b68b754ec2fef5c790ca0e2e03e"}
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.903352 4774 scope.go:117] "RemoveContainer" containerID="2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a"
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.926109 4774 scope.go:117] "RemoveContainer" containerID="eb4bd12184e195db4d4194abff92f7b415b1a4b6a5cfe8c199be6e5eb8fdff6e"
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.944279 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dr6md"]
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.956149 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dr6md"]
Nov 21 15:28:57 crc kubenswrapper[4774]: I1121 15:28:57.968195 4774 scope.go:117] "RemoveContainer" containerID="b1c0116a41ec4b01e57ef8aecbfd19553735eb66081b6d3dc127c76680341bba"
Nov 21 15:28:58 crc kubenswrapper[4774]: I1121 15:28:58.011204 4774 scope.go:117] "RemoveContainer" containerID="2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a"
Nov 21 15:28:58 crc kubenswrapper[4774]: E1121 15:28:58.011813 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a\": container with ID starting with 2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a not found: ID does not exist" containerID="2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a"
Nov 21 15:28:58 crc kubenswrapper[4774]: I1121 15:28:58.011904 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a"} err="failed to get container status \"2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a\": rpc error: code = NotFound desc = could not find container \"2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a\": container with ID starting with 2d9ea438a60a3f649e7c474cf448f7ad35d79f8e7560df001599acc120f0e85a not found: ID does not exist"
Nov 21 15:28:58 crc kubenswrapper[4774]: I1121 15:28:58.011954 4774 scope.go:117] "RemoveContainer" containerID="eb4bd12184e195db4d4194abff92f7b415b1a4b6a5cfe8c199be6e5eb8fdff6e"
Nov 21 15:28:58 crc kubenswrapper[4774]: E1121 15:28:58.012317 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb4bd12184e195db4d4194abff92f7b415b1a4b6a5cfe8c199be6e5eb8fdff6e\": container with ID starting with eb4bd12184e195db4d4194abff92f7b415b1a4b6a5cfe8c199be6e5eb8fdff6e not found: ID does not exist" containerID="eb4bd12184e195db4d4194abff92f7b415b1a4b6a5cfe8c199be6e5eb8fdff6e"
Nov 21 15:28:58 crc kubenswrapper[4774]: I1121 15:28:58.012373 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb4bd12184e195db4d4194abff92f7b415b1a4b6a5cfe8c199be6e5eb8fdff6e"} err="failed to get container status \"eb4bd12184e195db4d4194abff92f7b415b1a4b6a5cfe8c199be6e5eb8fdff6e\": rpc error: code = NotFound desc = could not find container \"eb4bd12184e195db4d4194abff92f7b415b1a4b6a5cfe8c199be6e5eb8fdff6e\": container with ID starting with eb4bd12184e195db4d4194abff92f7b415b1a4b6a5cfe8c199be6e5eb8fdff6e not found: ID does not exist"
Nov 21 15:28:58 crc kubenswrapper[4774]: I1121 15:28:58.012409 4774 scope.go:117] "RemoveContainer" containerID="b1c0116a41ec4b01e57ef8aecbfd19553735eb66081b6d3dc127c76680341bba"
Nov 21 15:28:58 crc kubenswrapper[4774]: E1121 15:28:58.012787 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1c0116a41ec4b01e57ef8aecbfd19553735eb66081b6d3dc127c76680341bba\": container with ID starting with b1c0116a41ec4b01e57ef8aecbfd19553735eb66081b6d3dc127c76680341bba not found: ID does not exist" containerID="b1c0116a41ec4b01e57ef8aecbfd19553735eb66081b6d3dc127c76680341bba"
Nov 21 15:28:58 crc kubenswrapper[4774]: I1121 15:28:58.012882 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1c0116a41ec4b01e57ef8aecbfd19553735eb66081b6d3dc127c76680341bba"} err="failed to get container status \"b1c0116a41ec4b01e57ef8aecbfd19553735eb66081b6d3dc127c76680341bba\": rpc error: code = NotFound desc = could not find container \"b1c0116a41ec4b01e57ef8aecbfd19553735eb66081b6d3dc127c76680341bba\": container with ID starting with b1c0116a41ec4b01e57ef8aecbfd19553735eb66081b6d3dc127c76680341bba not found: ID does not exist"
Nov 21 15:28:58 crc kubenswrapper[4774]: I1121 15:28:58.104802 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9a584b0-2009-4784-9b80-dc0b0c27b0e5" path="/var/lib/kubelet/pods/d9a584b0-2009-4784-9b80-dc0b0c27b0e5/volumes"
Nov 21 15:28:59 crc kubenswrapper[4774]: I1121 15:28:59.601290 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 15:28:59 crc kubenswrapper[4774]: I1121 15:28:59.602068 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 15:28:59 crc kubenswrapper[4774]: I1121 15:28:59.602165 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb"
Nov 21 15:28:59 crc kubenswrapper[4774]: I1121 15:28:59.603641 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 21 15:28:59 crc kubenswrapper[4774]: I1121 15:28:59.603777 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" gracePeriod=600
Nov 21 15:28:59 crc kubenswrapper[4774]: E1121 15:28:59.737276 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:28:59 crc kubenswrapper[4774]: I1121 15:28:59.921872 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" exitCode=0
Nov 21 15:28:59 crc kubenswrapper[4774]: I1121 15:28:59.921919 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc"}
Nov 21 15:28:59 crc kubenswrapper[4774]: I1121 15:28:59.921956 4774 scope.go:117] "RemoveContainer" containerID="55e5bb7215c0e7b96cb956f72f36e0242e48205521d6294be282760e5b7b20ab"
Nov 21 15:28:59 crc kubenswrapper[4774]: I1121 15:28:59.922605 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc"
Nov 21 15:28:59 crc kubenswrapper[4774]: E1121 15:28:59.922845 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.496268 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-copy-data"]
Nov 21 15:29:11 crc kubenswrapper[4774]: E1121 15:29:11.497729 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9754f7aa-8618-4e4a-963f-20ba214a64af" containerName="registry-server"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.497749 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9754f7aa-8618-4e4a-963f-20ba214a64af" containerName="registry-server"
Nov 21 15:29:11 crc kubenswrapper[4774]: E1121 15:29:11.497764 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9a584b0-2009-4784-9b80-dc0b0c27b0e5" containerName="registry-server"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.497771 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9a584b0-2009-4784-9b80-dc0b0c27b0e5" containerName="registry-server"
Nov 21 15:29:11 crc kubenswrapper[4774]: E1121 15:29:11.497789 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9754f7aa-8618-4e4a-963f-20ba214a64af" containerName="extract-utilities"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.497796 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9754f7aa-8618-4e4a-963f-20ba214a64af" containerName="extract-utilities"
Nov 21 15:29:11 crc kubenswrapper[4774]: E1121 15:29:11.497809 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9a584b0-2009-4784-9b80-dc0b0c27b0e5" containerName="extract-utilities"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.497832 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9a584b0-2009-4784-9b80-dc0b0c27b0e5" containerName="extract-utilities"
Nov 21 15:29:11 crc kubenswrapper[4774]: E1121 15:29:11.497849 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9754f7aa-8618-4e4a-963f-20ba214a64af" containerName="extract-content"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.497856 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9754f7aa-8618-4e4a-963f-20ba214a64af" containerName="extract-content"
Nov 21 15:29:11 crc kubenswrapper[4774]: E1121 15:29:11.497874 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9a584b0-2009-4784-9b80-dc0b0c27b0e5" containerName="extract-content"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.497883 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9a584b0-2009-4784-9b80-dc0b0c27b0e5" containerName="extract-content"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.498107 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9a584b0-2009-4784-9b80-dc0b0c27b0e5" containerName="registry-server"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.498123 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9754f7aa-8618-4e4a-963f-20ba214a64af" containerName="registry-server"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.498895 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.501805 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-hbfp7"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.508698 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.576098 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\") pod \"mariadb-copy-data\" (UID: \"053368cd-72d7-4402-a577-40330d37399d\") " pod="openstack/mariadb-copy-data"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.576239 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwr4w\" (UniqueName: \"kubernetes.io/projected/053368cd-72d7-4402-a577-40330d37399d-kube-api-access-fwr4w\") pod \"mariadb-copy-data\" (UID: \"053368cd-72d7-4402-a577-40330d37399d\") " pod="openstack/mariadb-copy-data"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.677501 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwr4w\" (UniqueName: \"kubernetes.io/projected/053368cd-72d7-4402-a577-40330d37399d-kube-api-access-fwr4w\") pod \"mariadb-copy-data\" (UID: \"053368cd-72d7-4402-a577-40330d37399d\") " pod="openstack/mariadb-copy-data"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.677635 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\") pod \"mariadb-copy-data\" (UID: \"053368cd-72d7-4402-a577-40330d37399d\") " pod="openstack/mariadb-copy-data"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.680341 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.680378 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\") pod \"mariadb-copy-data\" (UID: \"053368cd-72d7-4402-a577-40330d37399d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c43cc241f8f886f7a1ea366bb17df4458e5336fc8dad4ea510d860fdacace8c4/globalmount\"" pod="openstack/mariadb-copy-data"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.697851 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwr4w\" (UniqueName: \"kubernetes.io/projected/053368cd-72d7-4402-a577-40330d37399d-kube-api-access-fwr4w\") pod \"mariadb-copy-data\" (UID: \"053368cd-72d7-4402-a577-40330d37399d\") " pod="openstack/mariadb-copy-data"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.718869 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\") pod \"mariadb-copy-data\" (UID: \"053368cd-72d7-4402-a577-40330d37399d\") " pod="openstack/mariadb-copy-data"
Nov 21 15:29:11 crc kubenswrapper[4774]: I1121 15:29:11.827239 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Nov 21 15:29:12 crc kubenswrapper[4774]: I1121 15:29:12.326139 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-copy-data"]
Nov 21 15:29:13 crc kubenswrapper[4774]: I1121 15:29:13.056395 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"053368cd-72d7-4402-a577-40330d37399d","Type":"ContainerStarted","Data":"cb0c6b7c21b867b1a4af5ae43ca4a80efe310250cb8187841bb70d0ee3046fd1"}
Nov 21 15:29:13 crc kubenswrapper[4774]: I1121 15:29:13.056700 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"053368cd-72d7-4402-a577-40330d37399d","Type":"ContainerStarted","Data":"8accd161e2705082d2ca1c1fb075c960b5242a713064ff7cdef4ac467ff122e6"}
Nov 21 15:29:13 crc kubenswrapper[4774]: I1121 15:29:13.073277 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/mariadb-copy-data" podStartSLOduration=3.073258179 podStartE2EDuration="3.073258179s" podCreationTimestamp="2025-11-21 15:29:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:29:13.068947367 +0000 UTC m=+5143.721146646" watchObservedRunningTime="2025-11-21 15:29:13.073258179 +0000 UTC m=+5143.725457438"
Nov 21 15:29:14 crc kubenswrapper[4774]: I1121 15:29:14.093738 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc"
Nov 21 15:29:14 crc kubenswrapper[4774]: E1121 15:29:14.094013 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:29:15 crc kubenswrapper[4774]: I1121 15:29:15.793691 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Nov 21 15:29:15 crc kubenswrapper[4774]: I1121 15:29:15.796177 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 21 15:29:15 crc kubenswrapper[4774]: I1121 15:29:15.803489 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Nov 21 15:29:15 crc kubenswrapper[4774]: I1121 15:29:15.840209 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqf72\" (UniqueName: \"kubernetes.io/projected/07037dff-47a9-44df-b61a-1f0104cf5583-kube-api-access-fqf72\") pod \"mariadb-client\" (UID: \"07037dff-47a9-44df-b61a-1f0104cf5583\") " pod="openstack/mariadb-client"
Nov 21 15:29:15 crc kubenswrapper[4774]: I1121 15:29:15.941997 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqf72\" (UniqueName: \"kubernetes.io/projected/07037dff-47a9-44df-b61a-1f0104cf5583-kube-api-access-fqf72\") pod \"mariadb-client\" (UID: \"07037dff-47a9-44df-b61a-1f0104cf5583\") " pod="openstack/mariadb-client"
Nov 21 15:29:15 crc kubenswrapper[4774]: I1121 15:29:15.959232 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqf72\" (UniqueName: \"kubernetes.io/projected/07037dff-47a9-44df-b61a-1f0104cf5583-kube-api-access-fqf72\") pod \"mariadb-client\" (UID: \"07037dff-47a9-44df-b61a-1f0104cf5583\") " pod="openstack/mariadb-client"
Nov 21 15:29:16 crc kubenswrapper[4774]: I1121 15:29:16.121904 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 21 15:29:16 crc kubenswrapper[4774]: I1121 15:29:16.536465 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Nov 21 15:29:17 crc kubenswrapper[4774]: I1121 15:29:17.091258 4774 generic.go:334] "Generic (PLEG): container finished" podID="07037dff-47a9-44df-b61a-1f0104cf5583" containerID="a5cefbace06a8ddd5ebcf5ad785961f1fa7333d7f52d45983228ee60c195b53e" exitCode=0
Nov 21 15:29:17 crc kubenswrapper[4774]: I1121 15:29:17.091307 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"07037dff-47a9-44df-b61a-1f0104cf5583","Type":"ContainerDied","Data":"a5cefbace06a8ddd5ebcf5ad785961f1fa7333d7f52d45983228ee60c195b53e"}
Nov 21 15:29:17 crc kubenswrapper[4774]: I1121 15:29:17.091561 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"07037dff-47a9-44df-b61a-1f0104cf5583","Type":"ContainerStarted","Data":"5fcd4a3533155f3916073f82b65b68dc3cd9d2a49aa5984cd2460572eb758611"}
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.365026 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.388597 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_07037dff-47a9-44df-b61a-1f0104cf5583/mariadb-client/0.log"
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.409926 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.418032 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.476719 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqf72\" (UniqueName: \"kubernetes.io/projected/07037dff-47a9-44df-b61a-1f0104cf5583-kube-api-access-fqf72\") pod \"07037dff-47a9-44df-b61a-1f0104cf5583\" (UID: \"07037dff-47a9-44df-b61a-1f0104cf5583\") "
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.482011 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07037dff-47a9-44df-b61a-1f0104cf5583-kube-api-access-fqf72" (OuterVolumeSpecName: "kube-api-access-fqf72") pod "07037dff-47a9-44df-b61a-1f0104cf5583" (UID: "07037dff-47a9-44df-b61a-1f0104cf5583"). InnerVolumeSpecName "kube-api-access-fqf72". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.534534 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/mariadb-client"]
Nov 21 15:29:18 crc kubenswrapper[4774]: E1121 15:29:18.535040 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07037dff-47a9-44df-b61a-1f0104cf5583" containerName="mariadb-client"
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.535070 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="07037dff-47a9-44df-b61a-1f0104cf5583" containerName="mariadb-client"
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.535984 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="07037dff-47a9-44df-b61a-1f0104cf5583" containerName="mariadb-client"
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.536897 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.552501 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.578766 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm9rx\" (UniqueName: \"kubernetes.io/projected/df6f30ab-204a-4ca7-8a69-4a88575288f2-kube-api-access-dm9rx\") pod \"mariadb-client\" (UID: \"df6f30ab-204a-4ca7-8a69-4a88575288f2\") " pod="openstack/mariadb-client"
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.578871 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqf72\" (UniqueName: \"kubernetes.io/projected/07037dff-47a9-44df-b61a-1f0104cf5583-kube-api-access-fqf72\") on node \"crc\" DevicePath \"\""
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.680344 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm9rx\" (UniqueName: \"kubernetes.io/projected/df6f30ab-204a-4ca7-8a69-4a88575288f2-kube-api-access-dm9rx\") pod \"mariadb-client\" (UID: \"df6f30ab-204a-4ca7-8a69-4a88575288f2\") " pod="openstack/mariadb-client"
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.698646 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm9rx\" (UniqueName: \"kubernetes.io/projected/df6f30ab-204a-4ca7-8a69-4a88575288f2-kube-api-access-dm9rx\") pod \"mariadb-client\" (UID: \"df6f30ab-204a-4ca7-8a69-4a88575288f2\") " pod="openstack/mariadb-client"
Nov 21 15:29:18 crc kubenswrapper[4774]: I1121 15:29:18.871642 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 21 15:29:19 crc kubenswrapper[4774]: I1121 15:29:19.107706 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5fcd4a3533155f3916073f82b65b68dc3cd9d2a49aa5984cd2460572eb758611"
Nov 21 15:29:19 crc kubenswrapper[4774]: I1121 15:29:19.107769 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 21 15:29:19 crc kubenswrapper[4774]: I1121 15:29:19.127254 4774 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/mariadb-client" oldPodUID="07037dff-47a9-44df-b61a-1f0104cf5583" podUID="df6f30ab-204a-4ca7-8a69-4a88575288f2"
Nov 21 15:29:19 crc kubenswrapper[4774]: I1121 15:29:19.267691 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/mariadb-client"]
Nov 21 15:29:19 crc kubenswrapper[4774]: W1121 15:29:19.279262 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf6f30ab_204a_4ca7_8a69_4a88575288f2.slice/crio-342672021d08adad1c9b6323a7e3f103981ee908bf467c3afd7d60f7785f39f3 WatchSource:0}: Error finding container 342672021d08adad1c9b6323a7e3f103981ee908bf467c3afd7d60f7785f39f3: Status 404 returned error can't find the container with id 342672021d08adad1c9b6323a7e3f103981ee908bf467c3afd7d60f7785f39f3
Nov 21 15:29:20 crc kubenswrapper[4774]: I1121 15:29:20.109995 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07037dff-47a9-44df-b61a-1f0104cf5583" path="/var/lib/kubelet/pods/07037dff-47a9-44df-b61a-1f0104cf5583/volumes"
Nov 21 15:29:20 crc kubenswrapper[4774]: I1121 15:29:20.124445 4774 generic.go:334] "Generic (PLEG): container finished" podID="df6f30ab-204a-4ca7-8a69-4a88575288f2" containerID="24f2c529194cadb74edcc8d180f8479186545e36c89b1c6e3c5d33b4586007f6" exitCode=0
Nov 21 15:29:20 crc kubenswrapper[4774]: I1121 15:29:20.124503 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"df6f30ab-204a-4ca7-8a69-4a88575288f2","Type":"ContainerDied","Data":"24f2c529194cadb74edcc8d180f8479186545e36c89b1c6e3c5d33b4586007f6"}
Nov 21 15:29:20 crc kubenswrapper[4774]: I1121 15:29:20.124538 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-client" event={"ID":"df6f30ab-204a-4ca7-8a69-4a88575288f2","Type":"ContainerStarted","Data":"342672021d08adad1c9b6323a7e3f103981ee908bf467c3afd7d60f7785f39f3"}
Nov 21 15:29:21 crc kubenswrapper[4774]: I1121 15:29:21.386297 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 21 15:29:21 crc kubenswrapper[4774]: I1121 15:29:21.406580 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_mariadb-client_df6f30ab-204a-4ca7-8a69-4a88575288f2/mariadb-client/0.log"
Nov 21 15:29:21 crc kubenswrapper[4774]: I1121 15:29:21.436423 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-client"]
Nov 21 15:29:21 crc kubenswrapper[4774]: I1121 15:29:21.457948 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-client"]
Nov 21 15:29:21 crc kubenswrapper[4774]: I1121 15:29:21.522853 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm9rx\" (UniqueName: \"kubernetes.io/projected/df6f30ab-204a-4ca7-8a69-4a88575288f2-kube-api-access-dm9rx\") pod \"df6f30ab-204a-4ca7-8a69-4a88575288f2\" (UID: \"df6f30ab-204a-4ca7-8a69-4a88575288f2\") "
Nov 21 15:29:21 crc kubenswrapper[4774]: I1121 15:29:21.534568 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df6f30ab-204a-4ca7-8a69-4a88575288f2-kube-api-access-dm9rx" (OuterVolumeSpecName: "kube-api-access-dm9rx") pod "df6f30ab-204a-4ca7-8a69-4a88575288f2" (UID: "df6f30ab-204a-4ca7-8a69-4a88575288f2"). InnerVolumeSpecName "kube-api-access-dm9rx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:29:21 crc kubenswrapper[4774]: I1121 15:29:21.625278 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm9rx\" (UniqueName: \"kubernetes.io/projected/df6f30ab-204a-4ca7-8a69-4a88575288f2-kube-api-access-dm9rx\") on node \"crc\" DevicePath \"\""
Nov 21 15:29:22 crc kubenswrapper[4774]: I1121 15:29:22.103359 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df6f30ab-204a-4ca7-8a69-4a88575288f2" path="/var/lib/kubelet/pods/df6f30ab-204a-4ca7-8a69-4a88575288f2/volumes"
Nov 21 15:29:22 crc kubenswrapper[4774]: I1121 15:29:22.139219 4774 scope.go:117] "RemoveContainer" containerID="24f2c529194cadb74edcc8d180f8479186545e36c89b1c6e3c5d33b4586007f6"
Nov 21 15:29:22 crc kubenswrapper[4774]: I1121 15:29:22.139301 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-client"
Nov 21 15:29:28 crc kubenswrapper[4774]: I1121 15:29:28.092683 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc"
Nov 21 15:29:28 crc kubenswrapper[4774]: E1121 15:29:28.093237 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:29:41 crc kubenswrapper[4774]: I1121 15:29:41.093453 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc"
Nov 21 15:29:41 crc kubenswrapper[4774]: E1121 15:29:41.094249 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:29:51 crc kubenswrapper[4774]: E1121 15:29:51.538381 4774 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.181:59696->38.102.83.181:39405: read tcp 38.102.83.181:59696->38.102.83.181:39405: read: connection reset by peer
Nov 21 15:29:51 crc kubenswrapper[4774]: E1121 15:29:51.943160 4774 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.181:59744->38.102.83.181:39405: write tcp 38.102.83.181:59744->38.102.83.181:39405: write: connection reset by peer
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.012795 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Nov 21 15:29:54 crc kubenswrapper[4774]: E1121 15:29:54.013496 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df6f30ab-204a-4ca7-8a69-4a88575288f2" containerName="mariadb-client"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.013532 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="df6f30ab-204a-4ca7-8a69-4a88575288f2" containerName="mariadb-client"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.014910 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="df6f30ab-204a-4ca7-8a69-4a88575288f2" containerName="mariadb-client"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.017069 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.021572 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-1"]
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.023315 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.023380 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.023428 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-8gvm7"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.023938 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.031326 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-2"]
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.033491 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.041167 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.049235 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"]
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.059207 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"]
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.093627 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc"
Nov 21 15:29:54 crc kubenswrapper[4774]: E1121 15:29:54.093865 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.131948 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.132025 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.132075 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-813cc6bb-ca44-4ef5-81fc-ee5499cc0334\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-813cc6bb-ca44-4ef5-81fc-ee5499cc0334\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.132115 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.132155 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.132179 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr7sb\" (UniqueName: \"kubernetes.io/projected/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-kube-api-access-nr7sb\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.132205 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-config\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.132231 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-config\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.132250 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bkhl\" (UniqueName: \"kubernetes.io/projected/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-kube-api-access-2bkhl\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.132273 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.132291 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.132319 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ae0c744d-0595-4335-b7fe-04ec3e2c9356\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ae0c744d-0595-4335-b7fe-04ec3e2c9356\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.195252 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.196683 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.199252 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.200621 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.201273 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-8zmn7"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.205497 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.231698 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-2"]
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233011 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233322 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233374 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233424 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95b1e4bc-598a-40b1-ba11-38997e4c5f41-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233458 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-813cc6bb-ca44-4ef5-81fc-ee5499cc0334\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-813cc6bb-ca44-4ef5-81fc-ee5499cc0334\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233479 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95b1e4bc-598a-40b1-ba11-38997e4c5f41-config\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233512 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233548 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/95b1e4bc-598a-40b1-ba11-38997e4c5f41-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233570 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95b1e4bc-598a-40b1-ba11-38997e4c5f41-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233590 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkj22\" (UniqueName: \"kubernetes.io/projected/95b1e4bc-598a-40b1-ba11-38997e4c5f41-kube-api-access-qkj22\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233650 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233674 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ba491375-e450-4340-852d-1bd107b6152f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba491375-e450-4340-852d-1bd107b6152f\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233701 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr7sb\" (UniqueName: \"kubernetes.io/projected/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-kube-api-access-nr7sb\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233724 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-config\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233767 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-config\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233827 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bkhl\" (UniqueName: \"kubernetes.io/projected/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-kube-api-access-2bkhl\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233853 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233876 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.233907 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ae0c744d-0595-4335-b7fe-04ec3e2c9356\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ae0c744d-0595-4335-b7fe-04ec3e2c9356\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.234641 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-scripts\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.235643 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-config\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.235657 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.235882 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-config\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.236024 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-ovsdb-rundir\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.236034 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.241092 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-1"]
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.242894 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.250887 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.256722 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.256778 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ae0c744d-0595-4335-b7fe-04ec3e2c9356\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ae0c744d-0595-4335-b7fe-04ec3e2c9356\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d530fdd96f3ffbc2b1c2f3ea2fad5de4d75823d4f2eec7c9a887005814e3ded3/globalmount\"" pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.257377 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.257597 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-813cc6bb-ca44-4ef5-81fc-ee5499cc0334\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-813cc6bb-ca44-4ef5-81fc-ee5499cc0334\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e8ff12fc4f088800c622c3d1340bf98c16cfe5dd2ad0e338dceafbf6692b0f73/globalmount\"" pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.263721 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-combined-ca-bundle\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.271595 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr7sb\" (UniqueName: \"kubernetes.io/projected/24d15b5b-e658-4b9b-8d23-4ecaf3308bc5-kube-api-access-nr7sb\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.281620 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bkhl\" (UniqueName: \"kubernetes.io/projected/94b57d87-612f-4d44-84f2-9cd3ffcf5ff7-kube-api-access-2bkhl\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.287896 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"]
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.313280 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"]
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335084 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prxvj\" (UniqueName: \"kubernetes.io/projected/42447951-828b-43e4-af24-86669d2c25b9-kube-api-access-prxvj\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335148 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c77605fe-1da0-4848-9319-1235551dd807-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335172 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11da5c53-952b-4bb6-bb73-a47bd209d574-config\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335192 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42447951-828b-43e4-af24-86669d2c25b9-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335221 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/11da5c53-952b-4bb6-bb73-a47bd209d574-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335259 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42447951-828b-43e4-af24-86669d2c25b9-config\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335280 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnrk5\" (UniqueName: \"kubernetes.io/projected/c77605fe-1da0-4848-9319-1235551dd807-kube-api-access-dnrk5\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335308 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-79d8f35f-41ea-40a0-b979-b9d8d088b439\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-79d8f35f-41ea-40a0-b979-b9d8d088b439\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335335 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95b1e4bc-598a-40b1-ba11-38997e4c5f41-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335360 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c3acaca3-2fa7-4812-a51c-2405cb7ac225\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c3acaca3-2fa7-4812-a51c-2405cb7ac225\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335422 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95b1e4bc-598a-40b1-ba11-38997e4c5f41-config\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335467 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42447951-828b-43e4-af24-86669d2c25b9-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335488 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/95b1e4bc-598a-40b1-ba11-38997e4c5f41-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335508 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95b1e4bc-598a-40b1-ba11-38997e4c5f41-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335579 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkj22\" (UniqueName: \"kubernetes.io/projected/95b1e4bc-598a-40b1-ba11-38997e4c5f41-kube-api-access-qkj22\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335628 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ab25ff71-595f-4fd9-85c8-e7eb819d5c22\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ab25ff71-595f-4fd9-85c8-e7eb819d5c22\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335672 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ba491375-e450-4340-852d-1bd107b6152f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba491375-e450-4340-852d-1bd107b6152f\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335709 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjzcq\" (UniqueName: \"kubernetes.io/projected/11da5c53-952b-4bb6-bb73-a47bd209d574-kube-api-access-vjzcq\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335733 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/11da5c53-952b-4bb6-bb73-a47bd209d574-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335764 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c77605fe-1da0-4848-9319-1235551dd807-config\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335804 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/42447951-828b-43e4-af24-86669d2c25b9-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335860 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c77605fe-1da0-4848-9319-1235551dd807-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335908 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c77605fe-1da0-4848-9319-1235551dd807-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.335956 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11da5c53-952b-4bb6-bb73-a47bd209d574-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.337390 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/95b1e4bc-598a-40b1-ba11-38997e4c5f41-ovsdb-rundir\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.337640 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95b1e4bc-598a-40b1-ba11-38997e4c5f41-config\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.341921 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95b1e4bc-598a-40b1-ba11-38997e4c5f41-scripts\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.367656 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95b1e4bc-598a-40b1-ba11-38997e4c5f41-combined-ca-bundle\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2"
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.368446 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkj22\" (UniqueName:
\"kubernetes.io/projected/95b1e4bc-598a-40b1-ba11-38997e4c5f41-kube-api-access-qkj22\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.416492 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.416533 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ba491375-e450-4340-852d-1bd107b6152f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba491375-e450-4340-852d-1bd107b6152f\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/cd521b0c1c5e83f563125f30366c839830363b36a827fdbfc3a26c4d71f2ad20/globalmount\"" pod="openstack/ovsdbserver-sb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437470 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42447951-828b-43e4-af24-86669d2c25b9-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437517 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ab25ff71-595f-4fd9-85c8-e7eb819d5c22\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ab25ff71-595f-4fd9-85c8-e7eb819d5c22\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437556 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjzcq\" (UniqueName: \"kubernetes.io/projected/11da5c53-952b-4bb6-bb73-a47bd209d574-kube-api-access-vjzcq\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437577 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/11da5c53-952b-4bb6-bb73-a47bd209d574-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437602 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c77605fe-1da0-4848-9319-1235551dd807-config\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437625 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/42447951-828b-43e4-af24-86669d2c25b9-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437644 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c77605fe-1da0-4848-9319-1235551dd807-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc 
kubenswrapper[4774]: I1121 15:29:54.437668 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c77605fe-1da0-4848-9319-1235551dd807-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437688 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11da5c53-952b-4bb6-bb73-a47bd209d574-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437710 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prxvj\" (UniqueName: \"kubernetes.io/projected/42447951-828b-43e4-af24-86669d2c25b9-kube-api-access-prxvj\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437731 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c77605fe-1da0-4848-9319-1235551dd807-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437746 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11da5c53-952b-4bb6-bb73-a47bd209d574-config\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437767 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42447951-828b-43e4-af24-86669d2c25b9-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437788 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/11da5c53-952b-4bb6-bb73-a47bd209d574-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437840 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42447951-828b-43e4-af24-86669d2c25b9-config\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437858 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnrk5\" (UniqueName: \"kubernetes.io/projected/c77605fe-1da0-4848-9319-1235551dd807-kube-api-access-dnrk5\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437881 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-79d8f35f-41ea-40a0-b979-b9d8d088b439\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-79d8f35f-41ea-40a0-b979-b9d8d088b439\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.437901 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c3acaca3-2fa7-4812-a51c-2405cb7ac225\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c3acaca3-2fa7-4812-a51c-2405cb7ac225\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.438220 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ae0c744d-0595-4335-b7fe-04ec3e2c9356\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ae0c744d-0595-4335-b7fe-04ec3e2c9356\") pod \"ovsdbserver-sb-0\" (UID: \"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7\") " pod="openstack/ovsdbserver-sb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.438600 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42447951-828b-43e4-af24-86669d2c25b9-scripts\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.438735 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/42447951-828b-43e4-af24-86669d2c25b9-ovsdb-rundir\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.438907 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c77605fe-1da0-4848-9319-1235551dd807-ovsdb-rundir\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.439111 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11da5c53-952b-4bb6-bb73-a47bd209d574-config\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.439382 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c77605fe-1da0-4848-9319-1235551dd807-config\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.441263 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c77605fe-1da0-4848-9319-1235551dd807-scripts\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.442959 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11da5c53-952b-4bb6-bb73-a47bd209d574-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.443485 4774 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/11da5c53-952b-4bb6-bb73-a47bd209d574-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.443746 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42447951-828b-43e4-af24-86669d2c25b9-config\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.443923 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.444206 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-79d8f35f-41ea-40a0-b979-b9d8d088b439\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-79d8f35f-41ea-40a0-b979-b9d8d088b439\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0f5fdef323846faed682b7266038c16fe3853cab71fa5f5d16b918bd22e12fc5/globalmount\"" pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.444406 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c77605fe-1da0-4848-9319-1235551dd807-combined-ca-bundle\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.444664 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42447951-828b-43e4-af24-86669d2c25b9-combined-ca-bundle\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.448215 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.448250 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c3acaca3-2fa7-4812-a51c-2405cb7ac225\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c3acaca3-2fa7-4812-a51c-2405cb7ac225\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/600313311f2cb39697a9869f9a7238f0a20aa26677434b0a988136415379b8f6/globalmount\"" pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.448345 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.448364 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ab25ff71-595f-4fd9-85c8-e7eb819d5c22\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ab25ff71-595f-4fd9-85c8-e7eb819d5c22\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/09eac7b4c3e2fee1c6d05faac21e9258bf5d8201ae09bf38aab53776e721191f/globalmount\"" pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.452329 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ba491375-e450-4340-852d-1bd107b6152f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ba491375-e450-4340-852d-1bd107b6152f\") pod \"ovsdbserver-sb-2\" (UID: \"95b1e4bc-598a-40b1-ba11-38997e4c5f41\") " pod="openstack/ovsdbserver-sb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.453260 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/11da5c53-952b-4bb6-bb73-a47bd209d574-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.454877 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-813cc6bb-ca44-4ef5-81fc-ee5499cc0334\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-813cc6bb-ca44-4ef5-81fc-ee5499cc0334\") pod \"ovsdbserver-sb-1\" (UID: \"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5\") " pod="openstack/ovsdbserver-sb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.462502 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prxvj\" (UniqueName: \"kubernetes.io/projected/42447951-828b-43e4-af24-86669d2c25b9-kube-api-access-prxvj\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.464361 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjzcq\" (UniqueName: \"kubernetes.io/projected/11da5c53-952b-4bb6-bb73-a47bd209d574-kube-api-access-vjzcq\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.476626 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnrk5\" (UniqueName: \"kubernetes.io/projected/c77605fe-1da0-4848-9319-1235551dd807-kube-api-access-dnrk5\") pod \"ovsdbserver-nb-2\" (UID: \"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.484112 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-79d8f35f-41ea-40a0-b979-b9d8d088b439\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-79d8f35f-41ea-40a0-b979-b9d8d088b439\") pod \"ovsdbserver-nb-0\" (UID: \"11da5c53-952b-4bb6-bb73-a47bd209d574\") " pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.486019 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ab25ff71-595f-4fd9-85c8-e7eb819d5c22\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ab25ff71-595f-4fd9-85c8-e7eb819d5c22\") pod \"ovsdbserver-nb-2\" (UID: 
\"c77605fe-1da0-4848-9319-1235551dd807\") " pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.486032 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c3acaca3-2fa7-4812-a51c-2405cb7ac225\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c3acaca3-2fa7-4812-a51c-2405cb7ac225\") pod \"ovsdbserver-nb-1\" (UID: \"42447951-828b-43e4-af24-86669d2c25b9\") " pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.513363 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.646578 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.652591 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.680930 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.684141 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-2" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.706433 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:54 crc kubenswrapper[4774]: I1121 15:29:54.982697 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Nov 21 15:29:55 crc kubenswrapper[4774]: I1121 15:29:55.245468 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Nov 21 15:29:55 crc kubenswrapper[4774]: W1121 15:29:55.253545 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94b57d87_612f_4d44_84f2_9cd3ffcf5ff7.slice/crio-57fcdebdc2224a2bffbbcce193307185b6b9e9d8f571f4aceba4e1a0022a2fd5 WatchSource:0}: Error finding container 57fcdebdc2224a2bffbbcce193307185b6b9e9d8f571f4aceba4e1a0022a2fd5: Status 404 returned error can't find the container with id 57fcdebdc2224a2bffbbcce193307185b6b9e9d8f571f4aceba4e1a0022a2fd5 Nov 21 15:29:55 crc kubenswrapper[4774]: I1121 15:29:55.368726 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-1"] Nov 21 15:29:55 crc kubenswrapper[4774]: I1121 15:29:55.410482 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"42447951-828b-43e4-af24-86669d2c25b9","Type":"ContainerStarted","Data":"385e191290bd20a05a45a194ec579e063d2d2a895dded1ce3531a716dfd1f3f4"} Nov 21 15:29:55 crc kubenswrapper[4774]: I1121 15:29:55.412324 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"11da5c53-952b-4bb6-bb73-a47bd209d574","Type":"ContainerStarted","Data":"510541a2d3102bd3c6c059f9c76a44df7485f17fff95079c84cb813ab9362e2d"} Nov 21 15:29:55 crc kubenswrapper[4774]: I1121 15:29:55.412356 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"11da5c53-952b-4bb6-bb73-a47bd209d574","Type":"ContainerStarted","Data":"fbee9dfc66a84a989134c25d87ec36bdaa8867111ce91bfdd3668ae213e00e60"} Nov 21 15:29:55 crc kubenswrapper[4774]: I1121 15:29:55.412367 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-nb-0" event={"ID":"11da5c53-952b-4bb6-bb73-a47bd209d574","Type":"ContainerStarted","Data":"5bddb53de9e5cc8b14e1ee0ba2aa0310b44e6644106786fbc6efe55c2d12560c"} Nov 21 15:29:55 crc kubenswrapper[4774]: I1121 15:29:55.414762 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7","Type":"ContainerStarted","Data":"57fcdebdc2224a2bffbbcce193307185b6b9e9d8f571f4aceba4e1a0022a2fd5"} Nov 21 15:29:55 crc kubenswrapper[4774]: I1121 15:29:55.436471 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=2.436449755 podStartE2EDuration="2.436449755s" podCreationTimestamp="2025-11-21 15:29:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:29:55.431277789 +0000 UTC m=+5186.083477048" watchObservedRunningTime="2025-11-21 15:29:55.436449755 +0000 UTC m=+5186.088649014" Nov 21 15:29:56 crc kubenswrapper[4774]: I1121 15:29:56.224405 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-2"] Nov 21 15:29:56 crc kubenswrapper[4774]: W1121 15:29:56.226328 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95b1e4bc_598a_40b1_ba11_38997e4c5f41.slice/crio-b4e644fed4951b97df75d5e38da561c34de71ded417d2dddd7289222f39864ef WatchSource:0}: Error finding container b4e644fed4951b97df75d5e38da561c34de71ded417d2dddd7289222f39864ef: Status 404 returned error can't find the container with id b4e644fed4951b97df75d5e38da561c34de71ded417d2dddd7289222f39864ef Nov 21 15:29:56 crc kubenswrapper[4774]: I1121 15:29:56.315199 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-2"] Nov 21 15:29:56 crc kubenswrapper[4774]: W1121 15:29:56.316511 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc77605fe_1da0_4848_9319_1235551dd807.slice/crio-3323736473d0f943921b73cc15e77abeb8bd90d26ebb9562cb7d5b18b10e2d04 WatchSource:0}: Error finding container 3323736473d0f943921b73cc15e77abeb8bd90d26ebb9562cb7d5b18b10e2d04: Status 404 returned error can't find the container with id 3323736473d0f943921b73cc15e77abeb8bd90d26ebb9562cb7d5b18b10e2d04 Nov 21 15:29:56 crc kubenswrapper[4774]: I1121 15:29:56.424498 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"c77605fe-1da0-4848-9319-1235551dd807","Type":"ContainerStarted","Data":"3323736473d0f943921b73cc15e77abeb8bd90d26ebb9562cb7d5b18b10e2d04"} Nov 21 15:29:56 crc kubenswrapper[4774]: I1121 15:29:56.426508 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7","Type":"ContainerStarted","Data":"c3ad17fd00277b7d46b4eed1109dd0b162f662855ad0390113dd4ffc12f7a22e"} Nov 21 15:29:56 crc kubenswrapper[4774]: I1121 15:29:56.426556 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"94b57d87-612f-4d44-84f2-9cd3ffcf5ff7","Type":"ContainerStarted","Data":"8f19c068f3ddb419f8fa08fc8b9b269c99c63a5f7142fb4bc8bcaf6fb36bbeec"} Nov 21 15:29:56 crc kubenswrapper[4774]: I1121 15:29:56.428789 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" 
event={"ID":"95b1e4bc-598a-40b1-ba11-38997e4c5f41","Type":"ContainerStarted","Data":"b4e644fed4951b97df75d5e38da561c34de71ded417d2dddd7289222f39864ef"} Nov 21 15:29:56 crc kubenswrapper[4774]: I1121 15:29:56.432430 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"42447951-828b-43e4-af24-86669d2c25b9","Type":"ContainerStarted","Data":"8b4d2985039bbd596debc1f9fe1f0d049b6f1c4028e7d97eba802fd5d4863b47"} Nov 21 15:29:56 crc kubenswrapper[4774]: I1121 15:29:56.432536 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-1" event={"ID":"42447951-828b-43e4-af24-86669d2c25b9","Type":"ContainerStarted","Data":"a24103604738cf5fa321e2990a89bd8a6c68e2aeeca00b17e53ac56c7b2dca16"} Nov 21 15:29:56 crc kubenswrapper[4774]: I1121 15:29:56.448685 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=4.44865225 podStartE2EDuration="4.44865225s" podCreationTimestamp="2025-11-21 15:29:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:29:56.446442098 +0000 UTC m=+5187.098641377" watchObservedRunningTime="2025-11-21 15:29:56.44865225 +0000 UTC m=+5187.100851549" Nov 21 15:29:56 crc kubenswrapper[4774]: I1121 15:29:56.468710 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-1" podStartSLOduration=3.468689548 podStartE2EDuration="3.468689548s" podCreationTimestamp="2025-11-21 15:29:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:29:56.461489384 +0000 UTC m=+5187.113688683" watchObservedRunningTime="2025-11-21 15:29:56.468689548 +0000 UTC m=+5187.120888817" Nov 21 15:29:56 crc kubenswrapper[4774]: I1121 15:29:56.976774 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-1"] Nov 21 15:29:56 crc kubenswrapper[4774]: W1121 15:29:56.983577 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24d15b5b_e658_4b9b_8d23_4ecaf3308bc5.slice/crio-b5a69b9d21699d040c3a7e13356a8769d8d0b5a2fa82ef9fb5b5b0df635acaa0 WatchSource:0}: Error finding container b5a69b9d21699d040c3a7e13356a8769d8d0b5a2fa82ef9fb5b5b0df635acaa0: Status 404 returned error can't find the container with id b5a69b9d21699d040c3a7e13356a8769d8d0b5a2fa82ef9fb5b5b0df635acaa0 Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.442910 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5","Type":"ContainerStarted","Data":"5133af36cb9001bf00bc17fbd01742b6eb4586d26316f53f26525a20ae5f5625"} Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.442965 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5","Type":"ContainerStarted","Data":"413ea9d25125acf46d6c6e1250c7de7f5e389c394a6ebc6b8f19a213a5f9e686"} Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.442977 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-1" event={"ID":"24d15b5b-e658-4b9b-8d23-4ecaf3308bc5","Type":"ContainerStarted","Data":"b5a69b9d21699d040c3a7e13356a8769d8d0b5a2fa82ef9fb5b5b0df635acaa0"} Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.444877 4774 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"c77605fe-1da0-4848-9319-1235551dd807","Type":"ContainerStarted","Data":"0f8cd4565497b3b6fbf3f655b322e1776a676abfd68a04092a6462f8ada7c80b"} Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.444924 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-2" event={"ID":"c77605fe-1da0-4848-9319-1235551dd807","Type":"ContainerStarted","Data":"f78e4846723fc83a551e843037a0a26f78ddb0bebd435dfa88bbbb51830096b4"} Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.447302 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"95b1e4bc-598a-40b1-ba11-38997e4c5f41","Type":"ContainerStarted","Data":"ca46fbc89a1afe3dcf7c29534f6a8555aaabe0c9fdd08f19b469988fa2d5b2fb"} Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.447353 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-2" event={"ID":"95b1e4bc-598a-40b1-ba11-38997e4c5f41","Type":"ContainerStarted","Data":"2403eacfe62aa97b59b726a796bc1cccad8a5943360c1e9f4dc798b1286a73ce"} Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.467368 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-1" podStartSLOduration=5.46734799 podStartE2EDuration="5.46734799s" podCreationTimestamp="2025-11-21 15:29:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:29:57.466483706 +0000 UTC m=+5188.118683005" watchObservedRunningTime="2025-11-21 15:29:57.46734799 +0000 UTC m=+5188.119547259" Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.484909 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-2" podStartSLOduration=5.484886787 podStartE2EDuration="5.484886787s" podCreationTimestamp="2025-11-21 15:29:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:29:57.479839924 +0000 UTC m=+5188.132039193" watchObservedRunningTime="2025-11-21 15:29:57.484886787 +0000 UTC m=+5188.137086066" Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.509490 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-2" podStartSLOduration=4.509474094 podStartE2EDuration="4.509474094s" podCreationTimestamp="2025-11-21 15:29:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:29:57.504283137 +0000 UTC m=+5188.156482396" watchObservedRunningTime="2025-11-21 15:29:57.509474094 +0000 UTC m=+5188.161673353" Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.513623 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.647919 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.653474 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.682599 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-1" Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 
15:29:57.684781 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-2" Nov 21 15:29:57 crc kubenswrapper[4774]: I1121 15:29:57.706570 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-1" Nov 21 15:29:59 crc kubenswrapper[4774]: I1121 15:29:59.514346 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Nov 21 15:29:59 crc kubenswrapper[4774]: I1121 15:29:59.647730 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-2" Nov 21 15:29:59 crc kubenswrapper[4774]: I1121 15:29:59.653179 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Nov 21 15:29:59 crc kubenswrapper[4774]: I1121 15:29:59.682524 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-1" Nov 21 15:29:59 crc kubenswrapper[4774]: I1121 15:29:59.684644 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-2" Nov 21 15:29:59 crc kubenswrapper[4774]: I1121 15:29:59.707292 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-1" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.150226 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww"] Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.152078 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.155063 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.155251 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.157774 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww"] Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.265571 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-secret-volume\") pod \"collect-profiles-29395650-96zww\" (UID: \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.265618 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzntx\" (UniqueName: \"kubernetes.io/projected/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-kube-api-access-lzntx\") pod \"collect-profiles-29395650-96zww\" (UID: \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.265719 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-config-volume\") pod \"collect-profiles-29395650-96zww\" (UID: \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.367495 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-config-volume\") pod \"collect-profiles-29395650-96zww\" (UID: \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.367849 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-secret-volume\") pod \"collect-profiles-29395650-96zww\" (UID: \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.367956 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzntx\" (UniqueName: \"kubernetes.io/projected/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-kube-api-access-lzntx\") pod \"collect-profiles-29395650-96zww\" (UID: \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.368688 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-config-volume\") pod \"collect-profiles-29395650-96zww\" (UID: \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.378565 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-secret-volume\") pod \"collect-profiles-29395650-96zww\" (UID: \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.385249 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzntx\" (UniqueName: \"kubernetes.io/projected/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-kube-api-access-lzntx\") pod \"collect-profiles-29395650-96zww\" (UID: \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.480860 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.558785 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.614609 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.703885 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.705799 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-2" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.734742 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-1" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.755641 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-2" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.761163 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.769247 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-1" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.822295 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-1" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.866534 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5768df885f-g7n52"] Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.868267 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.870575 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.878658 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5768df885f-g7n52"] Nov 21 15:30:00 crc kubenswrapper[4774]: I1121 15:30:00.961830 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww"] Nov 21 15:30:00 crc kubenswrapper[4774]: W1121 15:30:00.966429 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0cf48ff_e287_4b4b_b95d_b809b8b5a2e2.slice/crio-505ca99c4960160444838a1b5409f6d1c938b954aa3453525cea0a39b7cd63d3 WatchSource:0}: Error finding container 505ca99c4960160444838a1b5409f6d1c938b954aa3453525cea0a39b7cd63d3: Status 404 returned error can't find the container with id 505ca99c4960160444838a1b5409f6d1c938b954aa3453525cea0a39b7cd63d3 Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.018432 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-dns-svc\") pod \"dnsmasq-dns-5768df885f-g7n52\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") " pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.018682 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-config\") pod \"dnsmasq-dns-5768df885f-g7n52\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") " pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.018742 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-ovsdbserver-nb\") pod \"dnsmasq-dns-5768df885f-g7n52\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") " pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.018980 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8glw\" (UniqueName: \"kubernetes.io/projected/b3114b6d-4329-45e1-9898-98b8a6f3cb42-kube-api-access-q8glw\") pod \"dnsmasq-dns-5768df885f-g7n52\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") " pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.120003 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-dns-svc\") pod \"dnsmasq-dns-5768df885f-g7n52\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") " pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.120097 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-config\") pod \"dnsmasq-dns-5768df885f-g7n52\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") " pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 
15:30:01.120118 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-ovsdbserver-nb\") pod \"dnsmasq-dns-5768df885f-g7n52\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") " pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.120157 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8glw\" (UniqueName: \"kubernetes.io/projected/b3114b6d-4329-45e1-9898-98b8a6f3cb42-kube-api-access-q8glw\") pod \"dnsmasq-dns-5768df885f-g7n52\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") " pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.121785 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-dns-svc\") pod \"dnsmasq-dns-5768df885f-g7n52\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") " pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.121852 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-config\") pod \"dnsmasq-dns-5768df885f-g7n52\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") " pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.122575 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-ovsdbserver-nb\") pod \"dnsmasq-dns-5768df885f-g7n52\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") " pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.149844 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8glw\" (UniqueName: \"kubernetes.io/projected/b3114b6d-4329-45e1-9898-98b8a6f3cb42-kube-api-access-q8glw\") pod \"dnsmasq-dns-5768df885f-g7n52\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") " pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.236871 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5768df885f-g7n52" Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.246115 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5768df885f-g7n52"] Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.268536 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6cf5db4575-krjvd"] Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.271715 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.275232 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.286541 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cf5db4575-krjvd"]
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.425450 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sshhl\" (UniqueName: \"kubernetes.io/projected/dc736a06-2f0b-4688-94a8-b8849ec14ef5-kube-api-access-sshhl\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.425516 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-ovsdbserver-sb\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.425553 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-dns-svc\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.425573 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-config\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.425636 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-ovsdbserver-nb\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.484488 4774 generic.go:334] "Generic (PLEG): container finished" podID="f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2" containerID="84726f4d7803321f2f6fc42ea4491b744e590dca967102f35a9047fedd9a318b" exitCode=0
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.484602 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" event={"ID":"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2","Type":"ContainerDied","Data":"84726f4d7803321f2f6fc42ea4491b744e590dca967102f35a9047fedd9a318b"}
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.484929 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" event={"ID":"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2","Type":"ContainerStarted","Data":"505ca99c4960160444838a1b5409f6d1c938b954aa3453525cea0a39b7cd63d3"}
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.528222 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-dns-svc\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.528265 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-config\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.528322 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-ovsdbserver-nb\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.528496 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sshhl\" (UniqueName: \"kubernetes.io/projected/dc736a06-2f0b-4688-94a8-b8849ec14ef5-kube-api-access-sshhl\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.528538 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-ovsdbserver-sb\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.529339 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-ovsdbserver-sb\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.530134 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-config\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.530274 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-ovsdbserver-nb\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.530936 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-dns-svc\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.536464 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-2"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.538372 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-2"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.545406 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sshhl\" (UniqueName: \"kubernetes.io/projected/dc736a06-2f0b-4688-94a8-b8849ec14ef5-kube-api-access-sshhl\") pod \"dnsmasq-dns-6cf5db4575-krjvd\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.601216 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:01 crc kubenswrapper[4774]: I1121 15:30:01.760756 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5768df885f-g7n52"]
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.073194 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cf5db4575-krjvd"]
Nov 21 15:30:02 crc kubenswrapper[4774]: W1121 15:30:02.080794 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc736a06_2f0b_4688_94a8_b8849ec14ef5.slice/crio-e9b337562ae82e5e763c41dca65558e45c0190d5b2a759fd24a7d49c982b6d3b WatchSource:0}: Error finding container e9b337562ae82e5e763c41dca65558e45c0190d5b2a759fd24a7d49c982b6d3b: Status 404 returned error can't find the container with id e9b337562ae82e5e763c41dca65558e45c0190d5b2a759fd24a7d49c982b6d3b
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.502578 4774 generic.go:334] "Generic (PLEG): container finished" podID="b3114b6d-4329-45e1-9898-98b8a6f3cb42" containerID="55cc0bf19a981fe62edfb1d36c3d0e557c100fc56c754e2784cfb09496d76c51" exitCode=0
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.502655 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5768df885f-g7n52" event={"ID":"b3114b6d-4329-45e1-9898-98b8a6f3cb42","Type":"ContainerDied","Data":"55cc0bf19a981fe62edfb1d36c3d0e557c100fc56c754e2784cfb09496d76c51"}
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.502970 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5768df885f-g7n52" event={"ID":"b3114b6d-4329-45e1-9898-98b8a6f3cb42","Type":"ContainerStarted","Data":"8508b9a0919850081360d2d97c93cde1715e5ea218d38a7c1958e5e4a2e3a530"}
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.504736 4774 generic.go:334] "Generic (PLEG): container finished" podID="dc736a06-2f0b-4688-94a8-b8849ec14ef5" containerID="8db795590985ec76f8f4586c6c66f7c116f19eeabc2b9550d1eb926112a54932" exitCode=0
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.504778 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd" event={"ID":"dc736a06-2f0b-4688-94a8-b8849ec14ef5","Type":"ContainerDied","Data":"8db795590985ec76f8f4586c6c66f7c116f19eeabc2b9550d1eb926112a54932"}
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.504849 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd" event={"ID":"dc736a06-2f0b-4688-94a8-b8849ec14ef5","Type":"ContainerStarted","Data":"e9b337562ae82e5e763c41dca65558e45c0190d5b2a759fd24a7d49c982b6d3b"}
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.769678 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww"
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.798249 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5768df885f-g7n52"
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.855980 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-dns-svc\") pod \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") "
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.856626 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-secret-volume\") pod \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\" (UID: \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\") "
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.856755 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-config\") pod \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") "
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.856902 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-ovsdbserver-nb\") pod \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") "
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.857033 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-config-volume\") pod \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\" (UID: \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\") "
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.857151 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzntx\" (UniqueName: \"kubernetes.io/projected/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-kube-api-access-lzntx\") pod \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\" (UID: \"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2\") "
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.857302 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8glw\" (UniqueName: \"kubernetes.io/projected/b3114b6d-4329-45e1-9898-98b8a6f3cb42-kube-api-access-q8glw\") pod \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\" (UID: \"b3114b6d-4329-45e1-9898-98b8a6f3cb42\") "
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.858239 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-config-volume" (OuterVolumeSpecName: "config-volume") pod "f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2" (UID: "f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.861437 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-kube-api-access-lzntx" (OuterVolumeSpecName: "kube-api-access-lzntx") pod "f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2" (UID: "f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2"). InnerVolumeSpecName "kube-api-access-lzntx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.861555 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3114b6d-4329-45e1-9898-98b8a6f3cb42-kube-api-access-q8glw" (OuterVolumeSpecName: "kube-api-access-q8glw") pod "b3114b6d-4329-45e1-9898-98b8a6f3cb42" (UID: "b3114b6d-4329-45e1-9898-98b8a6f3cb42"). InnerVolumeSpecName "kube-api-access-q8glw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.861861 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2" (UID: "f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.877706 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b3114b6d-4329-45e1-9898-98b8a6f3cb42" (UID: "b3114b6d-4329-45e1-9898-98b8a6f3cb42"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.879080 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b3114b6d-4329-45e1-9898-98b8a6f3cb42" (UID: "b3114b6d-4329-45e1-9898-98b8a6f3cb42"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.880101 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-config" (OuterVolumeSpecName: "config") pod "b3114b6d-4329-45e1-9898-98b8a6f3cb42" (UID: "b3114b6d-4329-45e1-9898-98b8a6f3cb42"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.959735 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-dns-svc\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.959785 4774 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-secret-volume\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.959802 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-config\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.959813 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3114b6d-4329-45e1-9898-98b8a6f3cb42-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.959837 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-config-volume\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.959849 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzntx\" (UniqueName: \"kubernetes.io/projected/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2-kube-api-access-lzntx\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:02 crc kubenswrapper[4774]: I1121 15:30:02.959860 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8glw\" (UniqueName: \"kubernetes.io/projected/b3114b6d-4329-45e1-9898-98b8a6f3cb42-kube-api-access-q8glw\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.513417 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd" event={"ID":"dc736a06-2f0b-4688-94a8-b8849ec14ef5","Type":"ContainerStarted","Data":"6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14"}
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.513515 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.514933 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww"
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.514935 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww" event={"ID":"f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2","Type":"ContainerDied","Data":"505ca99c4960160444838a1b5409f6d1c938b954aa3453525cea0a39b7cd63d3"}
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.515888 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="505ca99c4960160444838a1b5409f6d1c938b954aa3453525cea0a39b7cd63d3"
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.517282 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5768df885f-g7n52" event={"ID":"b3114b6d-4329-45e1-9898-98b8a6f3cb42","Type":"ContainerDied","Data":"8508b9a0919850081360d2d97c93cde1715e5ea218d38a7c1958e5e4a2e3a530"}
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.517313 4774 scope.go:117] "RemoveContainer" containerID="55cc0bf19a981fe62edfb1d36c3d0e557c100fc56c754e2784cfb09496d76c51"
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.517415 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5768df885f-g7n52"
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.540683 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd" podStartSLOduration=2.540662138 podStartE2EDuration="2.540662138s" podCreationTimestamp="2025-11-21 15:30:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:30:03.534283317 +0000 UTC m=+5194.186482576" watchObservedRunningTime="2025-11-21 15:30:03.540662138 +0000 UTC m=+5194.192861397"
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.608513 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5768df885f-g7n52"]
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.616715 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5768df885f-g7n52"]
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.842065 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd"]
Nov 21 15:30:03 crc kubenswrapper[4774]: I1121 15:30:03.849107 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395605-qrbtd"]
Nov 21 15:30:04 crc kubenswrapper[4774]: I1121 15:30:04.106520 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99841b31-985c-4ca5-bbb7-443fab72f9d8" path="/var/lib/kubelet/pods/99841b31-985c-4ca5-bbb7-443fab72f9d8/volumes"
Nov 21 15:30:04 crc kubenswrapper[4774]: I1121 15:30:04.107401 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3114b6d-4329-45e1-9898-98b8a6f3cb42" path="/var/lib/kubelet/pods/b3114b6d-4329-45e1-9898-98b8a6f3cb42/volumes"
Nov 21 15:30:04 crc kubenswrapper[4774]: I1121 15:30:04.738124 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-1"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.093639 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc"
Nov 21 15:30:07 crc kubenswrapper[4774]: E1121 15:30:07.094434 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.112967 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-copy-data"]
Nov 21 15:30:07 crc kubenswrapper[4774]: E1121 15:30:07.113453 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2" containerName="collect-profiles"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.113471 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2" containerName="collect-profiles"
Nov 21 15:30:07 crc kubenswrapper[4774]: E1121 15:30:07.113480 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3114b6d-4329-45e1-9898-98b8a6f3cb42" containerName="init"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.113489 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3114b6d-4329-45e1-9898-98b8a6f3cb42" containerName="init"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.113680 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3114b6d-4329-45e1-9898-98b8a6f3cb42" containerName="init"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.113701 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2" containerName="collect-profiles"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.114382 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.116933 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovn-data-cert"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.132639 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.178541 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/c0268166-20f0-4778-976c-58373109d561-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") " pod="openstack/ovn-copy-data"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.178672 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsjwj\" (UniqueName: \"kubernetes.io/projected/c0268166-20f0-4778-976c-58373109d561-kube-api-access-hsjwj\") pod \"ovn-copy-data\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") " pod="openstack/ovn-copy-data"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.178920 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\") pod \"ovn-copy-data\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") " pod="openstack/ovn-copy-data"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.280656 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\") pod \"ovn-copy-data\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") " pod="openstack/ovn-copy-data"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.281313 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/c0268166-20f0-4778-976c-58373109d561-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") " pod="openstack/ovn-copy-data"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.281372 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsjwj\" (UniqueName: \"kubernetes.io/projected/c0268166-20f0-4778-976c-58373109d561-kube-api-access-hsjwj\") pod \"ovn-copy-data\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") " pod="openstack/ovn-copy-data"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.283595 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.283635 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\") pod \"ovn-copy-data\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f8258b6034024fc98e3771a75068d29fbcb966286592670e7edeb88547f3c941/globalmount\"" pod="openstack/ovn-copy-data"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.292485 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/c0268166-20f0-4778-976c-58373109d561-ovn-data-cert\") pod \"ovn-copy-data\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") " pod="openstack/ovn-copy-data"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.300225 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsjwj\" (UniqueName: \"kubernetes.io/projected/c0268166-20f0-4778-976c-58373109d561-kube-api-access-hsjwj\") pod \"ovn-copy-data\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") " pod="openstack/ovn-copy-data"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.316790 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\") pod \"ovn-copy-data\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") " pod="openstack/ovn-copy-data"
Nov 21 15:30:07 crc kubenswrapper[4774]: I1121 15:30:07.444564 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Nov 21 15:30:08 crc kubenswrapper[4774]: I1121 15:30:08.354558 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-copy-data"]
Nov 21 15:30:08 crc kubenswrapper[4774]: I1121 15:30:08.362797 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 21 15:30:08 crc kubenswrapper[4774]: I1121 15:30:08.589385 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"c0268166-20f0-4778-976c-58373109d561","Type":"ContainerStarted","Data":"9e87706406e065df33a5517807cf42030e8f357753d1cd191ad8636076191431"}
Nov 21 15:30:11 crc kubenswrapper[4774]: I1121 15:30:11.603118 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd"
Nov 21 15:30:11 crc kubenswrapper[4774]: I1121 15:30:11.633587 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"c0268166-20f0-4778-976c-58373109d561","Type":"ContainerStarted","Data":"67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34"}
Nov 21 15:30:11 crc kubenswrapper[4774]: I1121 15:30:11.652369 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-665ff86d95-rmd5z"]
Nov 21 15:30:11 crc kubenswrapper[4774]: I1121 15:30:11.652608 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z" podUID="844ca634-53e5-4246-880b-96811b359e81" containerName="dnsmasq-dns" containerID="cri-o://c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a" gracePeriod=10
Nov 21 15:30:11 crc kubenswrapper[4774]: I1121 15:30:11.664772 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-copy-data" podStartSLOduration=2.8418776230000002 podStartE2EDuration="5.664754696s" podCreationTimestamp="2025-11-21 15:30:06 +0000 UTC" firstStartedPulling="2025-11-21 15:30:08.362549063 +0000 UTC m=+5199.014748322" lastFinishedPulling="2025-11-21 15:30:11.185426126 +0000 UTC m=+5201.837625395" observedRunningTime="2025-11-21 15:30:11.65645072 +0000 UTC m=+5202.308649979" watchObservedRunningTime="2025-11-21 15:30:11.664754696 +0000 UTC m=+5202.316953955"
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.114008 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.169773 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5kxp\" (UniqueName: \"kubernetes.io/projected/844ca634-53e5-4246-880b-96811b359e81-kube-api-access-x5kxp\") pod \"844ca634-53e5-4246-880b-96811b359e81\" (UID: \"844ca634-53e5-4246-880b-96811b359e81\") "
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.169861 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/844ca634-53e5-4246-880b-96811b359e81-config\") pod \"844ca634-53e5-4246-880b-96811b359e81\" (UID: \"844ca634-53e5-4246-880b-96811b359e81\") "
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.169916 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/844ca634-53e5-4246-880b-96811b359e81-dns-svc\") pod \"844ca634-53e5-4246-880b-96811b359e81\" (UID: \"844ca634-53e5-4246-880b-96811b359e81\") "
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.190078 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/844ca634-53e5-4246-880b-96811b359e81-kube-api-access-x5kxp" (OuterVolumeSpecName: "kube-api-access-x5kxp") pod "844ca634-53e5-4246-880b-96811b359e81" (UID: "844ca634-53e5-4246-880b-96811b359e81"). InnerVolumeSpecName "kube-api-access-x5kxp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.209920 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/844ca634-53e5-4246-880b-96811b359e81-config" (OuterVolumeSpecName: "config") pod "844ca634-53e5-4246-880b-96811b359e81" (UID: "844ca634-53e5-4246-880b-96811b359e81"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.212365 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/844ca634-53e5-4246-880b-96811b359e81-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "844ca634-53e5-4246-880b-96811b359e81" (UID: "844ca634-53e5-4246-880b-96811b359e81"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.271642 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5kxp\" (UniqueName: \"kubernetes.io/projected/844ca634-53e5-4246-880b-96811b359e81-kube-api-access-x5kxp\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.271905 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/844ca634-53e5-4246-880b-96811b359e81-config\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.271983 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/844ca634-53e5-4246-880b-96811b359e81-dns-svc\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.642637 4774 generic.go:334] "Generic (PLEG): container finished" podID="844ca634-53e5-4246-880b-96811b359e81" containerID="c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a" exitCode=0
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.642699 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z"
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.642730 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z" event={"ID":"844ca634-53e5-4246-880b-96811b359e81","Type":"ContainerDied","Data":"c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a"}
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.642762 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-665ff86d95-rmd5z" event={"ID":"844ca634-53e5-4246-880b-96811b359e81","Type":"ContainerDied","Data":"cf8803009b5dcc157944ac75fa64469b1acc899e82ed3e9872a7d7878a81525f"}
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.642780 4774 scope.go:117] "RemoveContainer" containerID="c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a"
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.666226 4774 scope.go:117] "RemoveContainer" containerID="48603fc9545fdd21cef7450ae6201d5a479a08e3bd823161a73872254dc5c00a"
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.678885 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-665ff86d95-rmd5z"]
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.684163 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-665ff86d95-rmd5z"]
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.702283 4774 scope.go:117] "RemoveContainer" containerID="c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a"
Nov 21 15:30:12 crc kubenswrapper[4774]: E1121 15:30:12.702622 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a\": container with ID starting with c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a not found: ID does not exist" containerID="c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a"
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.702651 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a"} err="failed to get container status \"c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a\": rpc error: code = NotFound desc = could not find container \"c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a\": container with ID starting with c329b0dcc5230944315b762df6dca597baf6aa3745aa5ce7e5711fc4077d6e7a not found: ID does not exist"
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.702673 4774 scope.go:117] "RemoveContainer" containerID="48603fc9545fdd21cef7450ae6201d5a479a08e3bd823161a73872254dc5c00a"
Nov 21 15:30:12 crc kubenswrapper[4774]: E1121 15:30:12.703115 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48603fc9545fdd21cef7450ae6201d5a479a08e3bd823161a73872254dc5c00a\": container with ID starting with 48603fc9545fdd21cef7450ae6201d5a479a08e3bd823161a73872254dc5c00a not found: ID does not exist" containerID="48603fc9545fdd21cef7450ae6201d5a479a08e3bd823161a73872254dc5c00a"
Nov 21 15:30:12 crc kubenswrapper[4774]: I1121 15:30:12.703146 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48603fc9545fdd21cef7450ae6201d5a479a08e3bd823161a73872254dc5c00a"} err="failed to get container status \"48603fc9545fdd21cef7450ae6201d5a479a08e3bd823161a73872254dc5c00a\": rpc error: code = NotFound desc = could not find container \"48603fc9545fdd21cef7450ae6201d5a479a08e3bd823161a73872254dc5c00a\": container with ID starting with 48603fc9545fdd21cef7450ae6201d5a479a08e3bd823161a73872254dc5c00a not found: ID does not exist"
Nov 21 15:30:14 crc kubenswrapper[4774]: I1121 15:30:14.109508 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="844ca634-53e5-4246-880b-96811b359e81" path="/var/lib/kubelet/pods/844ca634-53e5-4246-880b-96811b359e81/volumes"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.856027 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Nov 21 15:30:16 crc kubenswrapper[4774]: E1121 15:30:16.856654 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="844ca634-53e5-4246-880b-96811b359e81" containerName="dnsmasq-dns"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.856668 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="844ca634-53e5-4246-880b-96811b359e81" containerName="dnsmasq-dns"
Nov 21 15:30:16 crc kubenswrapper[4774]: E1121 15:30:16.856685 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="844ca634-53e5-4246-880b-96811b359e81" containerName="init"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.856694 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="844ca634-53e5-4246-880b-96811b359e81" containerName="init"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.856932 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="844ca634-53e5-4246-880b-96811b359e81" containerName="dnsmasq-dns"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.858023 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.859547 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.859883 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-tsrkd"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.861675 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.878987 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.947086 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-config\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.947172 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-scripts\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.947222 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.947359 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5npn\" (UniqueName: \"kubernetes.io/projected/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-kube-api-access-j5npn\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:16 crc kubenswrapper[4774]: I1121 15:30:16.947488 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.049299 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-scripts\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.049389 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.049431 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5npn\" (UniqueName: \"kubernetes.io/projected/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-kube-api-access-j5npn\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.049481 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.049560 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-config\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.050129 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.050654 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-scripts\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.050757 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-config\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.057146 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.076167 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5npn\" (UniqueName: \"kubernetes.io/projected/09e968b0-0cb1-43c1-b3c6-12873fa2d80e-kube-api-access-j5npn\") pod \"ovn-northd-0\" (UID: \"09e968b0-0cb1-43c1-b3c6-12873fa2d80e\") " pod="openstack/ovn-northd-0"
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.193804 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.608532 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Nov 21 15:30:17 crc kubenswrapper[4774]: I1121 15:30:17.687041 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"09e968b0-0cb1-43c1-b3c6-12873fa2d80e","Type":"ContainerStarted","Data":"095edfaece6a5aa368bc5eb4b16061678fdb5fd0f3290c8b42f5a65c70bcf6fc"}
Nov 21 15:30:18 crc kubenswrapper[4774]: I1121 15:30:18.093220 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc"
Nov 21 15:30:18 crc kubenswrapper[4774]: E1121 15:30:18.093537 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:30:18 crc kubenswrapper[4774]: I1121 15:30:18.700547 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"09e968b0-0cb1-43c1-b3c6-12873fa2d80e","Type":"ContainerStarted","Data":"56494f42126769cea4fcc6db9546db63e5d73fe24b31e047117ca4b5cfa76669"}
Nov 21 15:30:18 crc kubenswrapper[4774]: I1121 15:30:18.700888 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"09e968b0-0cb1-43c1-b3c6-12873fa2d80e","Type":"ContainerStarted","Data":"2fd83351af7bda47afc3d3f117b38edab709a5d3a6747a3261bcd3da2a75a81c"}
Nov 21 15:30:18 crc kubenswrapper[4774]: I1121 15:30:18.701033 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Nov 21 15:30:18 crc kubenswrapper[4774]: I1121 15:30:18.730664 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.730643502 podStartE2EDuration="2.730643502s" podCreationTimestamp="2025-11-21 15:30:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:30:18.719166597 +0000 UTC m=+5209.371365876" watchObservedRunningTime="2025-11-21 15:30:18.730643502 +0000 UTC m=+5209.382842751"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.802508 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-55b3-account-create-h5rkb"]
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.804264 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-55b3-account-create-h5rkb"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.806396 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.807009 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-gnn8g"]
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.808076 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gnn8g"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.814645 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-55b3-account-create-h5rkb"]
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.821295 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-gnn8g"]
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.866030 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdmbg\" (UniqueName: \"kubernetes.io/projected/9cdf432d-0afa-49af-8682-8080beee68cb-kube-api-access-mdmbg\") pod \"keystone-db-create-gnn8g\" (UID: \"9cdf432d-0afa-49af-8682-8080beee68cb\") " pod="openstack/keystone-db-create-gnn8g"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.866087 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9cdf432d-0afa-49af-8682-8080beee68cb-operator-scripts\") pod \"keystone-db-create-gnn8g\" (UID: \"9cdf432d-0afa-49af-8682-8080beee68cb\") " pod="openstack/keystone-db-create-gnn8g"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.866153 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3fbd6161-8997-44a4-876a-92c03be70e1d-operator-scripts\") pod \"keystone-55b3-account-create-h5rkb\" (UID: \"3fbd6161-8997-44a4-876a-92c03be70e1d\") " pod="openstack/keystone-55b3-account-create-h5rkb"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.866179 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8z2gj\" (UniqueName: \"kubernetes.io/projected/3fbd6161-8997-44a4-876a-92c03be70e1d-kube-api-access-8z2gj\") pod \"keystone-55b3-account-create-h5rkb\" (UID: \"3fbd6161-8997-44a4-876a-92c03be70e1d\") " pod="openstack/keystone-55b3-account-create-h5rkb"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.967738 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdmbg\" (UniqueName: \"kubernetes.io/projected/9cdf432d-0afa-49af-8682-8080beee68cb-kube-api-access-mdmbg\") pod \"keystone-db-create-gnn8g\" (UID: \"9cdf432d-0afa-49af-8682-8080beee68cb\") " pod="openstack/keystone-db-create-gnn8g"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.967785 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9cdf432d-0afa-49af-8682-8080beee68cb-operator-scripts\") pod \"keystone-db-create-gnn8g\" (UID: \"9cdf432d-0afa-49af-8682-8080beee68cb\") " pod="openstack/keystone-db-create-gnn8g"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.967857 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3fbd6161-8997-44a4-876a-92c03be70e1d-operator-scripts\") pod \"keystone-55b3-account-create-h5rkb\" (UID: \"3fbd6161-8997-44a4-876a-92c03be70e1d\") " pod="openstack/keystone-55b3-account-create-h5rkb"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.967877 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8z2gj\" (UniqueName: \"kubernetes.io/projected/3fbd6161-8997-44a4-876a-92c03be70e1d-kube-api-access-8z2gj\") pod \"keystone-55b3-account-create-h5rkb\" (UID: \"3fbd6161-8997-44a4-876a-92c03be70e1d\") " pod="openstack/keystone-55b3-account-create-h5rkb"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.968732 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3fbd6161-8997-44a4-876a-92c03be70e1d-operator-scripts\") pod \"keystone-55b3-account-create-h5rkb\" (UID: \"3fbd6161-8997-44a4-876a-92c03be70e1d\") " pod="openstack/keystone-55b3-account-create-h5rkb"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.968732 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9cdf432d-0afa-49af-8682-8080beee68cb-operator-scripts\") pod \"keystone-db-create-gnn8g\" (UID: \"9cdf432d-0afa-49af-8682-8080beee68cb\") " pod="openstack/keystone-db-create-gnn8g"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.986198 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8z2gj\" (UniqueName: \"kubernetes.io/projected/3fbd6161-8997-44a4-876a-92c03be70e1d-kube-api-access-8z2gj\") pod \"keystone-55b3-account-create-h5rkb\" (UID: \"3fbd6161-8997-44a4-876a-92c03be70e1d\") " pod="openstack/keystone-55b3-account-create-h5rkb"
Nov 21 15:30:23 crc kubenswrapper[4774]: I1121 15:30:23.990470 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdmbg\" (UniqueName: \"kubernetes.io/projected/9cdf432d-0afa-49af-8682-8080beee68cb-kube-api-access-mdmbg\") pod \"keystone-db-create-gnn8g\" (UID: \"9cdf432d-0afa-49af-8682-8080beee68cb\") " pod="openstack/keystone-db-create-gnn8g"
Nov 21 15:30:24 crc kubenswrapper[4774]: I1121 15:30:24.130908 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-55b3-account-create-h5rkb"
Nov 21 15:30:24 crc kubenswrapper[4774]: I1121 15:30:24.138343 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gnn8g"
Nov 21 15:30:24 crc kubenswrapper[4774]: I1121 15:30:24.581234 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-gnn8g"]
Nov 21 15:30:24 crc kubenswrapper[4774]: W1121 15:30:24.592041 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9cdf432d_0afa_49af_8682_8080beee68cb.slice/crio-a22c31614c350d1b021ef968dc0125d8fa4247248f7a8f4be8a2ec4e215d1070 WatchSource:0}: Error finding container a22c31614c350d1b021ef968dc0125d8fa4247248f7a8f4be8a2ec4e215d1070: Status 404 returned error can't find the container with id a22c31614c350d1b021ef968dc0125d8fa4247248f7a8f4be8a2ec4e215d1070
Nov 21 15:30:24 crc kubenswrapper[4774]: W1121 15:30:24.642199 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3fbd6161_8997_44a4_876a_92c03be70e1d.slice/crio-6d1831a424c4e81e8472db22d4a3d430c7f4599d6e45c27de3f939126227fe4d WatchSource:0}: Error finding container 6d1831a424c4e81e8472db22d4a3d430c7f4599d6e45c27de3f939126227fe4d: Status 404 returned error can't find the container with id 6d1831a424c4e81e8472db22d4a3d430c7f4599d6e45c27de3f939126227fe4d
Nov 21 15:30:24 crc kubenswrapper[4774]: I1121 15:30:24.649018 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-55b3-account-create-h5rkb"]
Nov 21 15:30:24 crc kubenswrapper[4774]: I1121 15:30:24.759567 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gnn8g" event={"ID":"9cdf432d-0afa-49af-8682-8080beee68cb","Type":"ContainerStarted","Data":"a22c31614c350d1b021ef968dc0125d8fa4247248f7a8f4be8a2ec4e215d1070"}
Nov 21 15:30:24 crc kubenswrapper[4774]: I1121 15:30:24.761875 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-55b3-account-create-h5rkb" event={"ID":"3fbd6161-8997-44a4-876a-92c03be70e1d","Type":"ContainerStarted","Data":"6d1831a424c4e81e8472db22d4a3d430c7f4599d6e45c27de3f939126227fe4d"}
Nov 21 15:30:25 crc kubenswrapper[4774]: I1121 15:30:25.769883 4774 generic.go:334] "Generic (PLEG): container finished" podID="3fbd6161-8997-44a4-876a-92c03be70e1d" containerID="497639a1b1b6c13089297e4c0b054b5c78d2298e97a2cf5b2172f5d18e00ced5" exitCode=0
Nov 21 15:30:25 crc kubenswrapper[4774]: I1121 15:30:25.770182 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-55b3-account-create-h5rkb" event={"ID":"3fbd6161-8997-44a4-876a-92c03be70e1d","Type":"ContainerDied","Data":"497639a1b1b6c13089297e4c0b054b5c78d2298e97a2cf5b2172f5d18e00ced5"}
Nov 21 15:30:25 crc kubenswrapper[4774]: I1121 15:30:25.772229 4774 generic.go:334] "Generic (PLEG): container finished" podID="9cdf432d-0afa-49af-8682-8080beee68cb" containerID="0f44471e9903af560b22becb34c09c17ca5f2151bfef4f2fc597c810c3510659" exitCode=0
Nov 21 15:30:25 crc kubenswrapper[4774]: I1121 15:30:25.772257 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gnn8g" event={"ID":"9cdf432d-0afa-49af-8682-8080beee68cb","Type":"ContainerDied","Data":"0f44471e9903af560b22becb34c09c17ca5f2151bfef4f2fc597c810c3510659"}
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.231183 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gnn8g"
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.238619 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-55b3-account-create-h5rkb"
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.319162 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdmbg\" (UniqueName: \"kubernetes.io/projected/9cdf432d-0afa-49af-8682-8080beee68cb-kube-api-access-mdmbg\") pod \"9cdf432d-0afa-49af-8682-8080beee68cb\" (UID: \"9cdf432d-0afa-49af-8682-8080beee68cb\") "
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.319637 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9cdf432d-0afa-49af-8682-8080beee68cb-operator-scripts\") pod \"9cdf432d-0afa-49af-8682-8080beee68cb\" (UID: \"9cdf432d-0afa-49af-8682-8080beee68cb\") "
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.319766 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3fbd6161-8997-44a4-876a-92c03be70e1d-operator-scripts\") pod \"3fbd6161-8997-44a4-876a-92c03be70e1d\" (UID: \"3fbd6161-8997-44a4-876a-92c03be70e1d\") "
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.319850 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8z2gj\" (UniqueName: \"kubernetes.io/projected/3fbd6161-8997-44a4-876a-92c03be70e1d-kube-api-access-8z2gj\") pod \"3fbd6161-8997-44a4-876a-92c03be70e1d\" (UID: \"3fbd6161-8997-44a4-876a-92c03be70e1d\") "
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.320276 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cdf432d-0afa-49af-8682-8080beee68cb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9cdf432d-0afa-49af-8682-8080beee68cb" (UID: "9cdf432d-0afa-49af-8682-8080beee68cb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.320561 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3fbd6161-8997-44a4-876a-92c03be70e1d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3fbd6161-8997-44a4-876a-92c03be70e1d" (UID: "3fbd6161-8997-44a4-876a-92c03be70e1d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.325420 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cdf432d-0afa-49af-8682-8080beee68cb-kube-api-access-mdmbg" (OuterVolumeSpecName: "kube-api-access-mdmbg") pod "9cdf432d-0afa-49af-8682-8080beee68cb" (UID: "9cdf432d-0afa-49af-8682-8080beee68cb"). InnerVolumeSpecName "kube-api-access-mdmbg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.330042 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fbd6161-8997-44a4-876a-92c03be70e1d-kube-api-access-8z2gj" (OuterVolumeSpecName: "kube-api-access-8z2gj") pod "3fbd6161-8997-44a4-876a-92c03be70e1d" (UID: "3fbd6161-8997-44a4-876a-92c03be70e1d"). InnerVolumeSpecName "kube-api-access-8z2gj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.421798 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8z2gj\" (UniqueName: \"kubernetes.io/projected/3fbd6161-8997-44a4-876a-92c03be70e1d-kube-api-access-8z2gj\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.421870 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdmbg\" (UniqueName: \"kubernetes.io/projected/9cdf432d-0afa-49af-8682-8080beee68cb-kube-api-access-mdmbg\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.421882 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9cdf432d-0afa-49af-8682-8080beee68cb-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.421895 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3fbd6161-8997-44a4-876a-92c03be70e1d-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.796040 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-gnn8g" event={"ID":"9cdf432d-0afa-49af-8682-8080beee68cb","Type":"ContainerDied","Data":"a22c31614c350d1b021ef968dc0125d8fa4247248f7a8f4be8a2ec4e215d1070"}
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.796086 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a22c31614c350d1b021ef968dc0125d8fa4247248f7a8f4be8a2ec4e215d1070"
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.796134 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-gnn8g"
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.799103 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-55b3-account-create-h5rkb" event={"ID":"3fbd6161-8997-44a4-876a-92c03be70e1d","Type":"ContainerDied","Data":"6d1831a424c4e81e8472db22d4a3d430c7f4599d6e45c27de3f939126227fe4d"}
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.799145 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d1831a424c4e81e8472db22d4a3d430c7f4599d6e45c27de3f939126227fe4d"
Nov 21 15:30:27 crc kubenswrapper[4774]: I1121 15:30:27.799214 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-55b3-account-create-h5rkb"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.093199 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc"
Nov 21 15:30:29 crc kubenswrapper[4774]: E1121 15:30:29.093923 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.246225 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-gwn8n"]
Nov 21 15:30:29 crc kubenswrapper[4774]: E1121 15:30:29.246629 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cdf432d-0afa-49af-8682-8080beee68cb" containerName="mariadb-database-create"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.246648 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cdf432d-0afa-49af-8682-8080beee68cb" containerName="mariadb-database-create"
Nov 21 15:30:29 crc kubenswrapper[4774]: E1121 15:30:29.246678 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fbd6161-8997-44a4-876a-92c03be70e1d" containerName="mariadb-account-create"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.246689 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fbd6161-8997-44a4-876a-92c03be70e1d" containerName="mariadb-account-create"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.246900 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fbd6161-8997-44a4-876a-92c03be70e1d" containerName="mariadb-account-create"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.246928 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cdf432d-0afa-49af-8682-8080beee68cb" containerName="mariadb-database-create"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.247859 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gwn8n"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.248892 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f601dba9-4b07-49b9-87b7-16a991d8ea4a-combined-ca-bundle\") pod \"keystone-db-sync-gwn8n\" (UID: \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\") " pod="openstack/keystone-db-sync-gwn8n"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.248985 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f601dba9-4b07-49b9-87b7-16a991d8ea4a-config-data\") pod \"keystone-db-sync-gwn8n\" (UID: \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\") " pod="openstack/keystone-db-sync-gwn8n"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.249094 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fphrc\" (UniqueName: \"kubernetes.io/projected/f601dba9-4b07-49b9-87b7-16a991d8ea4a-kube-api-access-fphrc\") pod \"keystone-db-sync-gwn8n\" (UID: \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\") " pod="openstack/keystone-db-sync-gwn8n"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.249696 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.250336 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.250547 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.251188 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-997cd"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.254691 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-gwn8n"]
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.350285 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f601dba9-4b07-49b9-87b7-16a991d8ea4a-config-data\") pod \"keystone-db-sync-gwn8n\" (UID: \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\") " pod="openstack/keystone-db-sync-gwn8n"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.350393 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fphrc\" (UniqueName: \"kubernetes.io/projected/f601dba9-4b07-49b9-87b7-16a991d8ea4a-kube-api-access-fphrc\") pod \"keystone-db-sync-gwn8n\" (UID: \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\") " pod="openstack/keystone-db-sync-gwn8n"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.350667 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f601dba9-4b07-49b9-87b7-16a991d8ea4a-combined-ca-bundle\") pod \"keystone-db-sync-gwn8n\" (UID: \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\") " pod="openstack/keystone-db-sync-gwn8n"
Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.354608 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f601dba9-4b07-49b9-87b7-16a991d8ea4a-combined-ca-bundle\") pod \"keystone-db-sync-gwn8n\" (UID: \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\") " 
pod="openstack/keystone-db-sync-gwn8n" Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.354838 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f601dba9-4b07-49b9-87b7-16a991d8ea4a-config-data\") pod \"keystone-db-sync-gwn8n\" (UID: \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\") " pod="openstack/keystone-db-sync-gwn8n" Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.366390 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fphrc\" (UniqueName: \"kubernetes.io/projected/f601dba9-4b07-49b9-87b7-16a991d8ea4a-kube-api-access-fphrc\") pod \"keystone-db-sync-gwn8n\" (UID: \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\") " pod="openstack/keystone-db-sync-gwn8n" Nov 21 15:30:29 crc kubenswrapper[4774]: I1121 15:30:29.572631 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gwn8n" Nov 21 15:30:30 crc kubenswrapper[4774]: I1121 15:30:30.029953 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-gwn8n"] Nov 21 15:30:30 crc kubenswrapper[4774]: I1121 15:30:30.846273 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gwn8n" event={"ID":"f601dba9-4b07-49b9-87b7-16a991d8ea4a","Type":"ContainerStarted","Data":"b1721b895dd6548a9989fd723fae8be44590c4ade9d06d79112dea7cb4dda75c"} Nov 21 15:30:30 crc kubenswrapper[4774]: I1121 15:30:30.846665 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gwn8n" event={"ID":"f601dba9-4b07-49b9-87b7-16a991d8ea4a","Type":"ContainerStarted","Data":"bc270ac05c1e447037c94b70c7d07a233dd1c6e2b6d43d09d669b88c9493cf8d"} Nov 21 15:30:30 crc kubenswrapper[4774]: I1121 15:30:30.860902 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-gwn8n" podStartSLOduration=1.860881014 podStartE2EDuration="1.860881014s" podCreationTimestamp="2025-11-21 15:30:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:30:30.859250317 +0000 UTC m=+5221.511449576" watchObservedRunningTime="2025-11-21 15:30:30.860881014 +0000 UTC m=+5221.513080273" Nov 21 15:30:31 crc kubenswrapper[4774]: I1121 15:30:31.856393 4774 generic.go:334] "Generic (PLEG): container finished" podID="f601dba9-4b07-49b9-87b7-16a991d8ea4a" containerID="b1721b895dd6548a9989fd723fae8be44590c4ade9d06d79112dea7cb4dda75c" exitCode=0 Nov 21 15:30:31 crc kubenswrapper[4774]: I1121 15:30:31.856522 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gwn8n" event={"ID":"f601dba9-4b07-49b9-87b7-16a991d8ea4a","Type":"ContainerDied","Data":"b1721b895dd6548a9989fd723fae8be44590c4ade9d06d79112dea7cb4dda75c"} Nov 21 15:30:32 crc kubenswrapper[4774]: I1121 15:30:32.277619 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.188699 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-gwn8n" Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.316666 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f601dba9-4b07-49b9-87b7-16a991d8ea4a-config-data\") pod \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\" (UID: \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\") " Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.316797 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f601dba9-4b07-49b9-87b7-16a991d8ea4a-combined-ca-bundle\") pod \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\" (UID: \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\") " Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.316859 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fphrc\" (UniqueName: \"kubernetes.io/projected/f601dba9-4b07-49b9-87b7-16a991d8ea4a-kube-api-access-fphrc\") pod \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\" (UID: \"f601dba9-4b07-49b9-87b7-16a991d8ea4a\") " Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.322783 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f601dba9-4b07-49b9-87b7-16a991d8ea4a-kube-api-access-fphrc" (OuterVolumeSpecName: "kube-api-access-fphrc") pod "f601dba9-4b07-49b9-87b7-16a991d8ea4a" (UID: "f601dba9-4b07-49b9-87b7-16a991d8ea4a"). InnerVolumeSpecName "kube-api-access-fphrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.340241 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f601dba9-4b07-49b9-87b7-16a991d8ea4a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f601dba9-4b07-49b9-87b7-16a991d8ea4a" (UID: "f601dba9-4b07-49b9-87b7-16a991d8ea4a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.361207 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f601dba9-4b07-49b9-87b7-16a991d8ea4a-config-data" (OuterVolumeSpecName: "config-data") pod "f601dba9-4b07-49b9-87b7-16a991d8ea4a" (UID: "f601dba9-4b07-49b9-87b7-16a991d8ea4a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.418362 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f601dba9-4b07-49b9-87b7-16a991d8ea4a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.418394 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fphrc\" (UniqueName: \"kubernetes.io/projected/f601dba9-4b07-49b9-87b7-16a991d8ea4a-kube-api-access-fphrc\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.418406 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f601dba9-4b07-49b9-87b7-16a991d8ea4a-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.879938 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-gwn8n" event={"ID":"f601dba9-4b07-49b9-87b7-16a991d8ea4a","Type":"ContainerDied","Data":"bc270ac05c1e447037c94b70c7d07a233dd1c6e2b6d43d09d669b88c9493cf8d"} Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.880017 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc270ac05c1e447037c94b70c7d07a233dd1c6e2b6d43d09d669b88c9493cf8d" Nov 21 15:30:33 crc kubenswrapper[4774]: I1121 15:30:33.880113 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-gwn8n" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.111711 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b64df9dbc-s2plv"] Nov 21 15:30:34 crc kubenswrapper[4774]: E1121 15:30:34.112395 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f601dba9-4b07-49b9-87b7-16a991d8ea4a" containerName="keystone-db-sync" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.112422 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f601dba9-4b07-49b9-87b7-16a991d8ea4a" containerName="keystone-db-sync" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.112633 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f601dba9-4b07-49b9-87b7-16a991d8ea4a" containerName="keystone-db-sync" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.113947 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.142361 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b64df9dbc-s2plv"] Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.169979 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-58brh"] Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.171292 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.174592 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-997cd" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.175324 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.175446 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.175599 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.176549 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.206962 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-58brh"] Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.239628 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-ovsdbserver-nb\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.240675 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-ovsdbserver-sb\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.240724 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-dns-svc\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.240806 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlw7j\" (UniqueName: \"kubernetes.io/projected/1dfeeb56-c04c-43e1-9572-031dc79efd56-kube-api-access-zlw7j\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.240959 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-config\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.342866 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-fernet-keys\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.342963 4774 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-combined-ca-bundle\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.343002 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-scripts\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.343164 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-ovsdbserver-sb\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.343213 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-dns-svc\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.343259 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlw7j\" (UniqueName: \"kubernetes.io/projected/1dfeeb56-c04c-43e1-9572-031dc79efd56-kube-api-access-zlw7j\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.343291 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-credential-keys\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.343369 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-config\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.343526 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glsbn\" (UniqueName: \"kubernetes.io/projected/7e0a270e-ae5f-4d58-9a38-54054859b3e2-kube-api-access-glsbn\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.343593 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-config-data\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.343628 4774 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-ovsdbserver-nb\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.344531 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-config\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.344551 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-dns-svc\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.344566 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-ovsdbserver-nb\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.344565 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-ovsdbserver-sb\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.363100 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlw7j\" (UniqueName: \"kubernetes.io/projected/1dfeeb56-c04c-43e1-9572-031dc79efd56-kube-api-access-zlw7j\") pod \"dnsmasq-dns-b64df9dbc-s2plv\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.440440 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.444750 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glsbn\" (UniqueName: \"kubernetes.io/projected/7e0a270e-ae5f-4d58-9a38-54054859b3e2-kube-api-access-glsbn\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.444835 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-config-data\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.444893 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-fernet-keys\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.444969 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-combined-ca-bundle\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.445005 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-scripts\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.445050 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-credential-keys\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.448457 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-credential-keys\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.448750 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-scripts\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.449583 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-combined-ca-bundle\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.449809 4774 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-fernet-keys\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.450865 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-config-data\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.464798 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glsbn\" (UniqueName: \"kubernetes.io/projected/7e0a270e-ae5f-4d58-9a38-54054859b3e2-kube-api-access-glsbn\") pod \"keystone-bootstrap-58brh\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.492756 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:34 crc kubenswrapper[4774]: I1121 15:30:34.920106 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b64df9dbc-s2plv"] Nov 21 15:30:34 crc kubenswrapper[4774]: W1121 15:30:34.925435 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1dfeeb56_c04c_43e1_9572_031dc79efd56.slice/crio-98859c4979de4e63bed2a97012b468c9f43e8e62665cda2f521931cd276c82a2 WatchSource:0}: Error finding container 98859c4979de4e63bed2a97012b468c9f43e8e62665cda2f521931cd276c82a2: Status 404 returned error can't find the container with id 98859c4979de4e63bed2a97012b468c9f43e8e62665cda2f521931cd276c82a2 Nov 21 15:30:35 crc kubenswrapper[4774]: I1121 15:30:35.077389 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-58brh"] Nov 21 15:30:35 crc kubenswrapper[4774]: W1121 15:30:35.079503 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e0a270e_ae5f_4d58_9a38_54054859b3e2.slice/crio-fbfddf5d003ab5ce2032e4fb5e200594f7f2d50c171229ecb9327a16a48842fd WatchSource:0}: Error finding container fbfddf5d003ab5ce2032e4fb5e200594f7f2d50c171229ecb9327a16a48842fd: Status 404 returned error can't find the container with id fbfddf5d003ab5ce2032e4fb5e200594f7f2d50c171229ecb9327a16a48842fd Nov 21 15:30:35 crc kubenswrapper[4774]: I1121 15:30:35.898251 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-58brh" event={"ID":"7e0a270e-ae5f-4d58-9a38-54054859b3e2","Type":"ContainerStarted","Data":"08dd28986e7b778398e849af0aa12723664c49d426f641f8fafb397674009f41"} Nov 21 15:30:35 crc kubenswrapper[4774]: I1121 15:30:35.898324 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-58brh" event={"ID":"7e0a270e-ae5f-4d58-9a38-54054859b3e2","Type":"ContainerStarted","Data":"fbfddf5d003ab5ce2032e4fb5e200594f7f2d50c171229ecb9327a16a48842fd"} Nov 21 15:30:35 crc kubenswrapper[4774]: I1121 15:30:35.901946 4774 generic.go:334] "Generic (PLEG): container finished" podID="1dfeeb56-c04c-43e1-9572-031dc79efd56" containerID="354b9fbcfee59a3bfbea5492448476e669a565134d24b5c3da5566b95eee05b3" exitCode=0 Nov 21 15:30:35 crc kubenswrapper[4774]: I1121 15:30:35.902033 4774 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" event={"ID":"1dfeeb56-c04c-43e1-9572-031dc79efd56","Type":"ContainerDied","Data":"354b9fbcfee59a3bfbea5492448476e669a565134d24b5c3da5566b95eee05b3"} Nov 21 15:30:35 crc kubenswrapper[4774]: I1121 15:30:35.902080 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" event={"ID":"1dfeeb56-c04c-43e1-9572-031dc79efd56","Type":"ContainerStarted","Data":"98859c4979de4e63bed2a97012b468c9f43e8e62665cda2f521931cd276c82a2"} Nov 21 15:30:35 crc kubenswrapper[4774]: I1121 15:30:35.927167 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-58brh" podStartSLOduration=1.927146531 podStartE2EDuration="1.927146531s" podCreationTimestamp="2025-11-21 15:30:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:30:35.925257318 +0000 UTC m=+5226.577456597" watchObservedRunningTime="2025-11-21 15:30:35.927146531 +0000 UTC m=+5226.579345800" Nov 21 15:30:36 crc kubenswrapper[4774]: I1121 15:30:36.929328 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" event={"ID":"1dfeeb56-c04c-43e1-9572-031dc79efd56","Type":"ContainerStarted","Data":"727e9b29a7e95e848261c8c8fcd8a786c9c58723037e547997d13a7acfc20bb1"} Nov 21 15:30:36 crc kubenswrapper[4774]: I1121 15:30:36.930548 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:38 crc kubenswrapper[4774]: I1121 15:30:38.953752 4774 generic.go:334] "Generic (PLEG): container finished" podID="7e0a270e-ae5f-4d58-9a38-54054859b3e2" containerID="08dd28986e7b778398e849af0aa12723664c49d426f641f8fafb397674009f41" exitCode=0 Nov 21 15:30:38 crc kubenswrapper[4774]: I1121 15:30:38.953815 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-58brh" event={"ID":"7e0a270e-ae5f-4d58-9a38-54054859b3e2","Type":"ContainerDied","Data":"08dd28986e7b778398e849af0aa12723664c49d426f641f8fafb397674009f41"} Nov 21 15:30:38 crc kubenswrapper[4774]: I1121 15:30:38.980174 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" podStartSLOduration=4.980142014 podStartE2EDuration="4.980142014s" podCreationTimestamp="2025-11-21 15:30:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:30:36.958395957 +0000 UTC m=+5227.610595256" watchObservedRunningTime="2025-11-21 15:30:38.980142014 +0000 UTC m=+5229.632341293" Nov 21 15:30:39 crc kubenswrapper[4774]: I1121 15:30:39.948116 4774 scope.go:117] "RemoveContainer" containerID="bd52be5a342ffdb29dc3e5a9611d5df8f96b916802c0e6af988cdba470627421" Nov 21 15:30:39 crc kubenswrapper[4774]: I1121 15:30:39.976456 4774 scope.go:117] "RemoveContainer" containerID="17400af807a23db985ade6fc7122c89b948c7c1858062cc13f5f050c0d2e1268" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.044612 4774 scope.go:117] "RemoveContainer" containerID="97ea2896fb60aea223ff83c1707a98f3faf2258cbf125e009220bf43a22f8a1c" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.097384 4774 scope.go:117] "RemoveContainer" containerID="c7339e43433862310352470391f84aa65906c0e81082b0836ceea13b28fd95d5" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.282796 4774 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.378361 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-combined-ca-bundle\") pod \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.378422 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glsbn\" (UniqueName: \"kubernetes.io/projected/7e0a270e-ae5f-4d58-9a38-54054859b3e2-kube-api-access-glsbn\") pod \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.378494 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-credential-keys\") pod \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.378586 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-fernet-keys\") pod \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.378664 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-config-data\") pod \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.378837 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-scripts\") pod \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\" (UID: \"7e0a270e-ae5f-4d58-9a38-54054859b3e2\") " Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.384572 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-scripts" (OuterVolumeSpecName: "scripts") pod "7e0a270e-ae5f-4d58-9a38-54054859b3e2" (UID: "7e0a270e-ae5f-4d58-9a38-54054859b3e2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.384572 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e0a270e-ae5f-4d58-9a38-54054859b3e2-kube-api-access-glsbn" (OuterVolumeSpecName: "kube-api-access-glsbn") pod "7e0a270e-ae5f-4d58-9a38-54054859b3e2" (UID: "7e0a270e-ae5f-4d58-9a38-54054859b3e2"). InnerVolumeSpecName "kube-api-access-glsbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.385124 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7e0a270e-ae5f-4d58-9a38-54054859b3e2" (UID: "7e0a270e-ae5f-4d58-9a38-54054859b3e2"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.386272 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "7e0a270e-ae5f-4d58-9a38-54054859b3e2" (UID: "7e0a270e-ae5f-4d58-9a38-54054859b3e2"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.406202 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-config-data" (OuterVolumeSpecName: "config-data") pod "7e0a270e-ae5f-4d58-9a38-54054859b3e2" (UID: "7e0a270e-ae5f-4d58-9a38-54054859b3e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.407727 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e0a270e-ae5f-4d58-9a38-54054859b3e2" (UID: "7e0a270e-ae5f-4d58-9a38-54054859b3e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.480394 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.480421 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.480431 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.480442 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glsbn\" (UniqueName: \"kubernetes.io/projected/7e0a270e-ae5f-4d58-9a38-54054859b3e2-kube-api-access-glsbn\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.480454 4774 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.480466 4774 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7e0a270e-ae5f-4d58-9a38-54054859b3e2-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.979011 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-58brh" event={"ID":"7e0a270e-ae5f-4d58-9a38-54054859b3e2","Type":"ContainerDied","Data":"fbfddf5d003ab5ce2032e4fb5e200594f7f2d50c171229ecb9327a16a48842fd"} Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.979052 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fbfddf5d003ab5ce2032e4fb5e200594f7f2d50c171229ecb9327a16a48842fd" Nov 21 15:30:40 crc kubenswrapper[4774]: I1121 15:30:40.979106 4774 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-58brh" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.050843 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-58brh"] Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.056066 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-58brh"] Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.093544 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:30:41 crc kubenswrapper[4774]: E1121 15:30:41.093857 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.152712 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-tt7dv"] Nov 21 15:30:41 crc kubenswrapper[4774]: E1121 15:30:41.153444 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e0a270e-ae5f-4d58-9a38-54054859b3e2" containerName="keystone-bootstrap" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.153483 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e0a270e-ae5f-4d58-9a38-54054859b3e2" containerName="keystone-bootstrap" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.153852 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e0a270e-ae5f-4d58-9a38-54054859b3e2" containerName="keystone-bootstrap" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.155161 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.160610 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-997cd" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.160630 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.160808 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.160876 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.162031 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.166011 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tt7dv"] Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.194663 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-credential-keys\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.194714 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-combined-ca-bundle\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.194786 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-config-data\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.194883 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-scripts\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.194964 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxqxv\" (UniqueName: \"kubernetes.io/projected/9101eeef-82b1-42af-821d-6061aa431bef-kube-api-access-sxqxv\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.195119 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-fernet-keys\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.296570 4774 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-sxqxv\" (UniqueName: \"kubernetes.io/projected/9101eeef-82b1-42af-821d-6061aa431bef-kube-api-access-sxqxv\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.296672 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-fernet-keys\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.296726 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-credential-keys\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.296746 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-combined-ca-bundle\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.296772 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-config-data\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.296795 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-scripts\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.301065 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-combined-ca-bundle\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.301172 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-config-data\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.301681 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-scripts\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.301855 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-fernet-keys\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") 
" pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.301922 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-credential-keys\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.312775 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxqxv\" (UniqueName: \"kubernetes.io/projected/9101eeef-82b1-42af-821d-6061aa431bef-kube-api-access-sxqxv\") pod \"keystone-bootstrap-tt7dv\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.497973 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.962483 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-tt7dv"] Nov 21 15:30:41 crc kubenswrapper[4774]: I1121 15:30:41.994595 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tt7dv" event={"ID":"9101eeef-82b1-42af-821d-6061aa431bef","Type":"ContainerStarted","Data":"e50a4b78cc278819689331b4bd1df85e8af0477813b3224cd5904ffafd2b7e16"} Nov 21 15:30:42 crc kubenswrapper[4774]: I1121 15:30:42.110277 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e0a270e-ae5f-4d58-9a38-54054859b3e2" path="/var/lib/kubelet/pods/7e0a270e-ae5f-4d58-9a38-54054859b3e2/volumes" Nov 21 15:30:43 crc kubenswrapper[4774]: I1121 15:30:43.007810 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tt7dv" event={"ID":"9101eeef-82b1-42af-821d-6061aa431bef","Type":"ContainerStarted","Data":"602325a616e3e9bcecd3ccac45856c01f3b925d86144ce97074f56387316f86b"} Nov 21 15:30:43 crc kubenswrapper[4774]: I1121 15:30:43.037786 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-tt7dv" podStartSLOduration=2.037766117 podStartE2EDuration="2.037766117s" podCreationTimestamp="2025-11-21 15:30:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:30:43.031853529 +0000 UTC m=+5233.684052798" watchObservedRunningTime="2025-11-21 15:30:43.037766117 +0000 UTC m=+5233.689965386" Nov 21 15:30:44 crc kubenswrapper[4774]: I1121 15:30:44.442132 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:30:44 crc kubenswrapper[4774]: I1121 15:30:44.500130 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cf5db4575-krjvd"] Nov 21 15:30:44 crc kubenswrapper[4774]: I1121 15:30:44.500403 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd" podUID="dc736a06-2f0b-4688-94a8-b8849ec14ef5" containerName="dnsmasq-dns" containerID="cri-o://6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14" gracePeriod=10 Nov 21 15:30:44 crc kubenswrapper[4774]: I1121 15:30:44.974711 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.032501 4774 generic.go:334] "Generic (PLEG): container finished" podID="9101eeef-82b1-42af-821d-6061aa431bef" containerID="602325a616e3e9bcecd3ccac45856c01f3b925d86144ce97074f56387316f86b" exitCode=0 Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.032566 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tt7dv" event={"ID":"9101eeef-82b1-42af-821d-6061aa431bef","Type":"ContainerDied","Data":"602325a616e3e9bcecd3ccac45856c01f3b925d86144ce97074f56387316f86b"} Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.036878 4774 generic.go:334] "Generic (PLEG): container finished" podID="dc736a06-2f0b-4688-94a8-b8849ec14ef5" containerID="6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14" exitCode=0 Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.036939 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd" event={"ID":"dc736a06-2f0b-4688-94a8-b8849ec14ef5","Type":"ContainerDied","Data":"6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14"} Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.036965 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd" event={"ID":"dc736a06-2f0b-4688-94a8-b8849ec14ef5","Type":"ContainerDied","Data":"e9b337562ae82e5e763c41dca65558e45c0190d5b2a759fd24a7d49c982b6d3b"} Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.036969 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6cf5db4575-krjvd" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.036980 4774 scope.go:117] "RemoveContainer" containerID="6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.064299 4774 scope.go:117] "RemoveContainer" containerID="8db795590985ec76f8f4586c6c66f7c116f19eeabc2b9550d1eb926112a54932" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.071179 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-config\") pod \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.071246 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-ovsdbserver-sb\") pod \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.071380 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-dns-svc\") pod \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.071493 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sshhl\" (UniqueName: \"kubernetes.io/projected/dc736a06-2f0b-4688-94a8-b8849ec14ef5-kube-api-access-sshhl\") pod \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.071524 4774 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-ovsdbserver-nb\") pod \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\" (UID: \"dc736a06-2f0b-4688-94a8-b8849ec14ef5\") " Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.076863 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc736a06-2f0b-4688-94a8-b8849ec14ef5-kube-api-access-sshhl" (OuterVolumeSpecName: "kube-api-access-sshhl") pod "dc736a06-2f0b-4688-94a8-b8849ec14ef5" (UID: "dc736a06-2f0b-4688-94a8-b8849ec14ef5"). InnerVolumeSpecName "kube-api-access-sshhl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.081569 4774 scope.go:117] "RemoveContainer" containerID="6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14" Nov 21 15:30:45 crc kubenswrapper[4774]: E1121 15:30:45.082046 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14\": container with ID starting with 6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14 not found: ID does not exist" containerID="6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.082095 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14"} err="failed to get container status \"6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14\": rpc error: code = NotFound desc = could not find container \"6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14\": container with ID starting with 6dd7662213fe4fcc74b19456f0e1584a4e3317455805acda89f05717e88b9f14 not found: ID does not exist" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.082127 4774 scope.go:117] "RemoveContainer" containerID="8db795590985ec76f8f4586c6c66f7c116f19eeabc2b9550d1eb926112a54932" Nov 21 15:30:45 crc kubenswrapper[4774]: E1121 15:30:45.082481 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8db795590985ec76f8f4586c6c66f7c116f19eeabc2b9550d1eb926112a54932\": container with ID starting with 8db795590985ec76f8f4586c6c66f7c116f19eeabc2b9550d1eb926112a54932 not found: ID does not exist" containerID="8db795590985ec76f8f4586c6c66f7c116f19eeabc2b9550d1eb926112a54932" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.082505 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8db795590985ec76f8f4586c6c66f7c116f19eeabc2b9550d1eb926112a54932"} err="failed to get container status \"8db795590985ec76f8f4586c6c66f7c116f19eeabc2b9550d1eb926112a54932\": rpc error: code = NotFound desc = could not find container \"8db795590985ec76f8f4586c6c66f7c116f19eeabc2b9550d1eb926112a54932\": container with ID starting with 8db795590985ec76f8f4586c6c66f7c116f19eeabc2b9550d1eb926112a54932 not found: ID does not exist" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.108436 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dc736a06-2f0b-4688-94a8-b8849ec14ef5" (UID: 
"dc736a06-2f0b-4688-94a8-b8849ec14ef5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.109195 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dc736a06-2f0b-4688-94a8-b8849ec14ef5" (UID: "dc736a06-2f0b-4688-94a8-b8849ec14ef5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.116207 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dc736a06-2f0b-4688-94a8-b8849ec14ef5" (UID: "dc736a06-2f0b-4688-94a8-b8849ec14ef5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.117986 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-config" (OuterVolumeSpecName: "config") pod "dc736a06-2f0b-4688-94a8-b8849ec14ef5" (UID: "dc736a06-2f0b-4688-94a8-b8849ec14ef5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.173664 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sshhl\" (UniqueName: \"kubernetes.io/projected/dc736a06-2f0b-4688-94a8-b8849ec14ef5-kube-api-access-sshhl\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.173710 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.173726 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.173735 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.173743 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc736a06-2f0b-4688-94a8-b8849ec14ef5-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.378617 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6cf5db4575-krjvd"] Nov 21 15:30:45 crc kubenswrapper[4774]: I1121 15:30:45.386249 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6cf5db4575-krjvd"] Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.108494 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc736a06-2f0b-4688-94a8-b8849ec14ef5" path="/var/lib/kubelet/pods/dc736a06-2f0b-4688-94a8-b8849ec14ef5/volumes" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.437224 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.506457 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-credential-keys\") pod \"9101eeef-82b1-42af-821d-6061aa431bef\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.506511 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-fernet-keys\") pod \"9101eeef-82b1-42af-821d-6061aa431bef\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.506548 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-config-data\") pod \"9101eeef-82b1-42af-821d-6061aa431bef\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.506585 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-scripts\") pod \"9101eeef-82b1-42af-821d-6061aa431bef\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.506644 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxqxv\" (UniqueName: \"kubernetes.io/projected/9101eeef-82b1-42af-821d-6061aa431bef-kube-api-access-sxqxv\") pod \"9101eeef-82b1-42af-821d-6061aa431bef\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.506666 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-combined-ca-bundle\") pod \"9101eeef-82b1-42af-821d-6061aa431bef\" (UID: \"9101eeef-82b1-42af-821d-6061aa431bef\") " Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.513762 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "9101eeef-82b1-42af-821d-6061aa431bef" (UID: "9101eeef-82b1-42af-821d-6061aa431bef"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.514970 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9101eeef-82b1-42af-821d-6061aa431bef-kube-api-access-sxqxv" (OuterVolumeSpecName: "kube-api-access-sxqxv") pod "9101eeef-82b1-42af-821d-6061aa431bef" (UID: "9101eeef-82b1-42af-821d-6061aa431bef"). InnerVolumeSpecName "kube-api-access-sxqxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.518925 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9101eeef-82b1-42af-821d-6061aa431bef" (UID: "9101eeef-82b1-42af-821d-6061aa431bef"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.531424 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-scripts" (OuterVolumeSpecName: "scripts") pod "9101eeef-82b1-42af-821d-6061aa431bef" (UID: "9101eeef-82b1-42af-821d-6061aa431bef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.536024 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-config-data" (OuterVolumeSpecName: "config-data") pod "9101eeef-82b1-42af-821d-6061aa431bef" (UID: "9101eeef-82b1-42af-821d-6061aa431bef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.545630 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9101eeef-82b1-42af-821d-6061aa431bef" (UID: "9101eeef-82b1-42af-821d-6061aa431bef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.608398 4774 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.608812 4774 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.608838 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.608850 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.608861 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxqxv\" (UniqueName: \"kubernetes.io/projected/9101eeef-82b1-42af-821d-6061aa431bef-kube-api-access-sxqxv\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:46 crc kubenswrapper[4774]: I1121 15:30:46.608869 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9101eeef-82b1-42af-821d-6061aa431bef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.061162 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-tt7dv" event={"ID":"9101eeef-82b1-42af-821d-6061aa431bef","Type":"ContainerDied","Data":"e50a4b78cc278819689331b4bd1df85e8af0477813b3224cd5904ffafd2b7e16"} Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.061216 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e50a4b78cc278819689331b4bd1df85e8af0477813b3224cd5904ffafd2b7e16" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.061297 4774 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-tt7dv" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.142840 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-84f6f47465-k6cnt"] Nov 21 15:30:47 crc kubenswrapper[4774]: E1121 15:30:47.143222 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9101eeef-82b1-42af-821d-6061aa431bef" containerName="keystone-bootstrap" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.143240 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9101eeef-82b1-42af-821d-6061aa431bef" containerName="keystone-bootstrap" Nov 21 15:30:47 crc kubenswrapper[4774]: E1121 15:30:47.143258 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc736a06-2f0b-4688-94a8-b8849ec14ef5" containerName="init" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.143266 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc736a06-2f0b-4688-94a8-b8849ec14ef5" containerName="init" Nov 21 15:30:47 crc kubenswrapper[4774]: E1121 15:30:47.143300 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc736a06-2f0b-4688-94a8-b8849ec14ef5" containerName="dnsmasq-dns" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.143308 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc736a06-2f0b-4688-94a8-b8849ec14ef5" containerName="dnsmasq-dns" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.143511 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc736a06-2f0b-4688-94a8-b8849ec14ef5" containerName="dnsmasq-dns" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.143543 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9101eeef-82b1-42af-821d-6061aa431bef" containerName="keystone-bootstrap" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.144237 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.147633 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.147873 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.151380 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.153243 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-997cd" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.155911 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-84f6f47465-k6cnt"] Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.219469 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-scripts\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.219516 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6th5h\" (UniqueName: \"kubernetes.io/projected/56867905-10e8-4f90-8716-3a1db96bcb2b-kube-api-access-6th5h\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.219554 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-config-data\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.219578 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-fernet-keys\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.219603 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-combined-ca-bundle\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.219621 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-credential-keys\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.321850 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-scripts\") pod \"keystone-84f6f47465-k6cnt\" 
(UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.321958 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6th5h\" (UniqueName: \"kubernetes.io/projected/56867905-10e8-4f90-8716-3a1db96bcb2b-kube-api-access-6th5h\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.321991 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-config-data\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.322027 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-fernet-keys\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.322065 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-combined-ca-bundle\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.322090 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-credential-keys\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.326454 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-scripts\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.327243 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-config-data\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.327766 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-credential-keys\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.331794 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-fernet-keys\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.340048 4774 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56867905-10e8-4f90-8716-3a1db96bcb2b-combined-ca-bundle\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.344133 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6th5h\" (UniqueName: \"kubernetes.io/projected/56867905-10e8-4f90-8716-3a1db96bcb2b-kube-api-access-6th5h\") pod \"keystone-84f6f47465-k6cnt\" (UID: \"56867905-10e8-4f90-8716-3a1db96bcb2b\") " pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:47 crc kubenswrapper[4774]: I1121 15:30:47.478790 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:48 crc kubenswrapper[4774]: I1121 15:30:48.006503 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-84f6f47465-k6cnt"] Nov 21 15:30:48 crc kubenswrapper[4774]: I1121 15:30:48.072962 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-84f6f47465-k6cnt" event={"ID":"56867905-10e8-4f90-8716-3a1db96bcb2b","Type":"ContainerStarted","Data":"c28783e18b69afb387ad2db52054dfdf3540cb58ef839f8380d9771406d63a46"} Nov 21 15:30:49 crc kubenswrapper[4774]: I1121 15:30:49.085611 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-84f6f47465-k6cnt" event={"ID":"56867905-10e8-4f90-8716-3a1db96bcb2b","Type":"ContainerStarted","Data":"e89360f7ab06b587604aa2541ba318658241231a745ad237cf4999ed4cb565a6"} Nov 21 15:30:49 crc kubenswrapper[4774]: I1121 15:30:49.086052 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:30:49 crc kubenswrapper[4774]: I1121 15:30:49.109743 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-84f6f47465-k6cnt" podStartSLOduration=2.109724177 podStartE2EDuration="2.109724177s" podCreationTimestamp="2025-11-21 15:30:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:30:49.105786695 +0000 UTC m=+5239.757985964" watchObservedRunningTime="2025-11-21 15:30:49.109724177 +0000 UTC m=+5239.761923446" Nov 21 15:30:53 crc kubenswrapper[4774]: I1121 15:30:53.093586 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:30:53 crc kubenswrapper[4774]: E1121 15:30:53.094427 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:31:07 crc kubenswrapper[4774]: I1121 15:31:07.094414 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:31:07 crc kubenswrapper[4774]: E1121 15:31:07.095425 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:31:18 crc kubenswrapper[4774]: I1121 15:31:18.094203 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:31:18 crc kubenswrapper[4774]: E1121 15:31:18.095044 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:31:18 crc kubenswrapper[4774]: I1121 15:31:18.930200 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-84f6f47465-k6cnt" Nov 21 15:31:23 crc kubenswrapper[4774]: I1121 15:31:23.866067 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 21 15:31:23 crc kubenswrapper[4774]: I1121 15:31:23.873350 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Nov 21 15:31:23 crc kubenswrapper[4774]: I1121 15:31:23.875504 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-d4fpc" Nov 21 15:31:23 crc kubenswrapper[4774]: I1121 15:31:23.877538 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 21 15:31:23 crc kubenswrapper[4774]: I1121 15:31:23.878309 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Nov 21 15:31:23 crc kubenswrapper[4774]: I1121 15:31:23.878488 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Nov 21 15:31:24 crc kubenswrapper[4774]: I1121 15:31:24.025577 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-openstack-config-secret\") pod \"openstackclient\" (UID: \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\") " pod="openstack/openstackclient" Nov 21 15:31:24 crc kubenswrapper[4774]: I1121 15:31:24.026087 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-openstack-config\") pod \"openstackclient\" (UID: \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\") " pod="openstack/openstackclient" Nov 21 15:31:24 crc kubenswrapper[4774]: I1121 15:31:24.026174 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j4ln\" (UniqueName: \"kubernetes.io/projected/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-kube-api-access-5j4ln\") pod \"openstackclient\" (UID: \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\") " pod="openstack/openstackclient" Nov 21 15:31:24 crc kubenswrapper[4774]: I1121 15:31:24.127341 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-openstack-config\") pod \"openstackclient\" (UID: 
\"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\") " pod="openstack/openstackclient" Nov 21 15:31:24 crc kubenswrapper[4774]: I1121 15:31:24.127404 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j4ln\" (UniqueName: \"kubernetes.io/projected/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-kube-api-access-5j4ln\") pod \"openstackclient\" (UID: \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\") " pod="openstack/openstackclient" Nov 21 15:31:24 crc kubenswrapper[4774]: I1121 15:31:24.127465 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-openstack-config-secret\") pod \"openstackclient\" (UID: \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\") " pod="openstack/openstackclient" Nov 21 15:31:24 crc kubenswrapper[4774]: I1121 15:31:24.128630 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-openstack-config\") pod \"openstackclient\" (UID: \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\") " pod="openstack/openstackclient" Nov 21 15:31:24 crc kubenswrapper[4774]: I1121 15:31:24.135252 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-openstack-config-secret\") pod \"openstackclient\" (UID: \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\") " pod="openstack/openstackclient" Nov 21 15:31:24 crc kubenswrapper[4774]: I1121 15:31:24.148321 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5j4ln\" (UniqueName: \"kubernetes.io/projected/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-kube-api-access-5j4ln\") pod \"openstackclient\" (UID: \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\") " pod="openstack/openstackclient" Nov 21 15:31:24 crc kubenswrapper[4774]: I1121 15:31:24.198443 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Nov 21 15:31:24 crc kubenswrapper[4774]: I1121 15:31:24.646842 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Nov 21 15:31:25 crc kubenswrapper[4774]: I1121 15:31:25.448350 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"f0b7c953-e181-4c03-bcb8-2bed4066a3fe","Type":"ContainerStarted","Data":"40ef2673c1ba379dcb49a1f7e004150828d3b6bab68310b2d4b6ab5553656cbe"} Nov 21 15:31:25 crc kubenswrapper[4774]: I1121 15:31:25.448431 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"f0b7c953-e181-4c03-bcb8-2bed4066a3fe","Type":"ContainerStarted","Data":"a76bc2aebb53e70bba69dd34ad1ea0f402c1bda4c3a6d22ee38e28c5fddd35a1"} Nov 21 15:31:25 crc kubenswrapper[4774]: I1121 15:31:25.470268 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.470243674 podStartE2EDuration="2.470243674s" podCreationTimestamp="2025-11-21 15:31:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:31:25.467586399 +0000 UTC m=+5276.119785708" watchObservedRunningTime="2025-11-21 15:31:25.470243674 +0000 UTC m=+5276.122442943" Nov 21 15:31:30 crc kubenswrapper[4774]: I1121 15:31:30.114618 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:31:30 crc kubenswrapper[4774]: E1121 15:31:30.122867 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:31:40 crc kubenswrapper[4774]: I1121 15:31:40.245955 4774 scope.go:117] "RemoveContainer" containerID="07894b185dc2b90ca1f1043b7eeea0be3a44d1ceb2e9312e0770770e42a3151b" Nov 21 15:31:40 crc kubenswrapper[4774]: I1121 15:31:40.285230 4774 scope.go:117] "RemoveContainer" containerID="52b4f9073277db773ac58fabc7062ef8908880d5f7a9afa0e8e05ecd0fe1ec6a" Nov 21 15:31:40 crc kubenswrapper[4774]: I1121 15:31:40.345133 4774 scope.go:117] "RemoveContainer" containerID="932d1edfe5564c75a077105a3aa624b7ff8cf75d2d48584b5168c91404dbed69" Nov 21 15:31:40 crc kubenswrapper[4774]: I1121 15:31:40.379811 4774 scope.go:117] "RemoveContainer" containerID="82e49591f76c39251c278bc199ada53449ccdd580d368d4716516ca63f1143a2" Nov 21 15:31:40 crc kubenswrapper[4774]: I1121 15:31:40.416585 4774 scope.go:117] "RemoveContainer" containerID="747aa1fc52e089ff15528fd34c83c1cef5c8731c67258037e1d15a9db1d8d8ca" Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.648328 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-f2zwc"] Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.650537 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.665725 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f2zwc"] Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.756478 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-utilities\") pod \"redhat-operators-f2zwc\" (UID: \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\") " pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.756658 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvnbx\" (UniqueName: \"kubernetes.io/projected/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-kube-api-access-gvnbx\") pod \"redhat-operators-f2zwc\" (UID: \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\") " pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.756874 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-catalog-content\") pod \"redhat-operators-f2zwc\" (UID: \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\") " pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.858851 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-catalog-content\") pod \"redhat-operators-f2zwc\" (UID: \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\") " pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.858943 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-utilities\") pod \"redhat-operators-f2zwc\" (UID: \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\") " pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.859014 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvnbx\" (UniqueName: \"kubernetes.io/projected/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-kube-api-access-gvnbx\") pod \"redhat-operators-f2zwc\" (UID: \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\") " pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.859518 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-catalog-content\") pod \"redhat-operators-f2zwc\" (UID: \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\") " pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.859652 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-utilities\") pod \"redhat-operators-f2zwc\" (UID: \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\") " pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.882122 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gvnbx\" (UniqueName: \"kubernetes.io/projected/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-kube-api-access-gvnbx\") pod \"redhat-operators-f2zwc\" (UID: \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\") " pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:41 crc kubenswrapper[4774]: I1121 15:31:41.974169 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:42 crc kubenswrapper[4774]: I1121 15:31:42.401160 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f2zwc"] Nov 21 15:31:42 crc kubenswrapper[4774]: I1121 15:31:42.622059 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2zwc" event={"ID":"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc","Type":"ContainerStarted","Data":"a6b255507045ac1cb83988953c72089d5254633b5201f35434212b8aff70d6db"} Nov 21 15:31:42 crc kubenswrapper[4774]: I1121 15:31:42.622123 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2zwc" event={"ID":"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc","Type":"ContainerStarted","Data":"da38a51561903cb3f138ebd1c992dc7ede99bb016d285d39c8a8b5e4f03eaf93"} Nov 21 15:31:43 crc kubenswrapper[4774]: I1121 15:31:43.092931 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:31:43 crc kubenswrapper[4774]: E1121 15:31:43.095074 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:31:43 crc kubenswrapper[4774]: I1121 15:31:43.640122 4774 generic.go:334] "Generic (PLEG): container finished" podID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" containerID="a6b255507045ac1cb83988953c72089d5254633b5201f35434212b8aff70d6db" exitCode=0 Nov 21 15:31:43 crc kubenswrapper[4774]: I1121 15:31:43.640193 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2zwc" event={"ID":"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc","Type":"ContainerDied","Data":"a6b255507045ac1cb83988953c72089d5254633b5201f35434212b8aff70d6db"} Nov 21 15:31:45 crc kubenswrapper[4774]: I1121 15:31:45.662847 4774 generic.go:334] "Generic (PLEG): container finished" podID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" containerID="a20294e8bf3cb191bc9673bbbbf4759c5a696d877a169650dff9a5429e24dd5a" exitCode=0 Nov 21 15:31:45 crc kubenswrapper[4774]: I1121 15:31:45.662935 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2zwc" event={"ID":"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc","Type":"ContainerDied","Data":"a20294e8bf3cb191bc9673bbbbf4759c5a696d877a169650dff9a5429e24dd5a"} Nov 21 15:31:46 crc kubenswrapper[4774]: I1121 15:31:46.679466 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2zwc" event={"ID":"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc","Type":"ContainerStarted","Data":"8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f"} Nov 21 15:31:46 crc kubenswrapper[4774]: I1121 15:31:46.707225 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-f2zwc" podStartSLOduration=3.048663862 podStartE2EDuration="5.70720448s" podCreationTimestamp="2025-11-21 15:31:41 +0000 UTC" firstStartedPulling="2025-11-21 15:31:43.642735551 +0000 UTC m=+5294.294934840" lastFinishedPulling="2025-11-21 15:31:46.301276189 +0000 UTC m=+5296.953475458" observedRunningTime="2025-11-21 15:31:46.701725724 +0000 UTC m=+5297.353924993" watchObservedRunningTime="2025-11-21 15:31:46.70720448 +0000 UTC m=+5297.359403749" Nov 21 15:31:51 crc kubenswrapper[4774]: I1121 15:31:51.975312 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:51 crc kubenswrapper[4774]: I1121 15:31:51.976931 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:31:53 crc kubenswrapper[4774]: I1121 15:31:53.033263 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-f2zwc" podUID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" containerName="registry-server" probeResult="failure" output=< Nov 21 15:31:53 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 15:31:53 crc kubenswrapper[4774]: > Nov 21 15:31:56 crc kubenswrapper[4774]: I1121 15:31:56.093529 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:31:56 crc kubenswrapper[4774]: E1121 15:31:56.094163 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:32:02 crc kubenswrapper[4774]: I1121 15:32:02.048940 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:32:02 crc kubenswrapper[4774]: I1121 15:32:02.104694 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:32:02 crc kubenswrapper[4774]: I1121 15:32:02.293745 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f2zwc"] Nov 21 15:32:03 crc kubenswrapper[4774]: I1121 15:32:03.859986 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-f2zwc" podUID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" containerName="registry-server" containerID="cri-o://8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f" gracePeriod=2 Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.306048 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.365970 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-utilities\") pod \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\" (UID: \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\") " Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.366176 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-catalog-content\") pod \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\" (UID: \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\") " Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.366242 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvnbx\" (UniqueName: \"kubernetes.io/projected/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-kube-api-access-gvnbx\") pod \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\" (UID: \"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc\") " Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.367112 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-utilities" (OuterVolumeSpecName: "utilities") pod "b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" (UID: "b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.470917 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.475309 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" (UID: "b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.572591 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.867969 4774 generic.go:334] "Generic (PLEG): container finished" podID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" containerID="8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f" exitCode=0 Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.868017 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2zwc" event={"ID":"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc","Type":"ContainerDied","Data":"8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f"} Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.868038 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f2zwc" Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.868057 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f2zwc" event={"ID":"b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc","Type":"ContainerDied","Data":"da38a51561903cb3f138ebd1c992dc7ede99bb016d285d39c8a8b5e4f03eaf93"} Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.868078 4774 scope.go:117] "RemoveContainer" containerID="8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f" Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.938286 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-kube-api-access-gvnbx" (OuterVolumeSpecName: "kube-api-access-gvnbx") pod "b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" (UID: "b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc"). InnerVolumeSpecName "kube-api-access-gvnbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.979674 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvnbx\" (UniqueName: \"kubernetes.io/projected/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc-kube-api-access-gvnbx\") on node \"crc\" DevicePath \"\"" Nov 21 15:32:04 crc kubenswrapper[4774]: I1121 15:32:04.999490 4774 scope.go:117] "RemoveContainer" containerID="a20294e8bf3cb191bc9673bbbbf4759c5a696d877a169650dff9a5429e24dd5a" Nov 21 15:32:05 crc kubenswrapper[4774]: I1121 15:32:05.021098 4774 scope.go:117] "RemoveContainer" containerID="a6b255507045ac1cb83988953c72089d5254633b5201f35434212b8aff70d6db" Nov 21 15:32:05 crc kubenswrapper[4774]: I1121 15:32:05.061414 4774 scope.go:117] "RemoveContainer" containerID="8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f" Nov 21 15:32:05 crc kubenswrapper[4774]: E1121 15:32:05.061938 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f\": container with ID starting with 8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f not found: ID does not exist" containerID="8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f" Nov 21 15:32:05 crc kubenswrapper[4774]: I1121 15:32:05.062040 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f"} err="failed to get container status \"8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f\": rpc error: code = NotFound desc = could not find container \"8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f\": container with ID starting with 8fd29ccaec3eeae13fc1b82029ec6be8e656e3914edbba56b9bfd4dc3e099b2f not found: ID does not exist" Nov 21 15:32:05 crc kubenswrapper[4774]: I1121 15:32:05.062086 4774 scope.go:117] "RemoveContainer" containerID="a20294e8bf3cb191bc9673bbbbf4759c5a696d877a169650dff9a5429e24dd5a" Nov 21 15:32:05 crc kubenswrapper[4774]: E1121 15:32:05.062427 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a20294e8bf3cb191bc9673bbbbf4759c5a696d877a169650dff9a5429e24dd5a\": container with ID starting with a20294e8bf3cb191bc9673bbbbf4759c5a696d877a169650dff9a5429e24dd5a not found: ID does not exist" 
containerID="a20294e8bf3cb191bc9673bbbbf4759c5a696d877a169650dff9a5429e24dd5a" Nov 21 15:32:05 crc kubenswrapper[4774]: I1121 15:32:05.062577 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a20294e8bf3cb191bc9673bbbbf4759c5a696d877a169650dff9a5429e24dd5a"} err="failed to get container status \"a20294e8bf3cb191bc9673bbbbf4759c5a696d877a169650dff9a5429e24dd5a\": rpc error: code = NotFound desc = could not find container \"a20294e8bf3cb191bc9673bbbbf4759c5a696d877a169650dff9a5429e24dd5a\": container with ID starting with a20294e8bf3cb191bc9673bbbbf4759c5a696d877a169650dff9a5429e24dd5a not found: ID does not exist" Nov 21 15:32:05 crc kubenswrapper[4774]: I1121 15:32:05.062702 4774 scope.go:117] "RemoveContainer" containerID="a6b255507045ac1cb83988953c72089d5254633b5201f35434212b8aff70d6db" Nov 21 15:32:05 crc kubenswrapper[4774]: E1121 15:32:05.063199 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6b255507045ac1cb83988953c72089d5254633b5201f35434212b8aff70d6db\": container with ID starting with a6b255507045ac1cb83988953c72089d5254633b5201f35434212b8aff70d6db not found: ID does not exist" containerID="a6b255507045ac1cb83988953c72089d5254633b5201f35434212b8aff70d6db" Nov 21 15:32:05 crc kubenswrapper[4774]: I1121 15:32:05.063235 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6b255507045ac1cb83988953c72089d5254633b5201f35434212b8aff70d6db"} err="failed to get container status \"a6b255507045ac1cb83988953c72089d5254633b5201f35434212b8aff70d6db\": rpc error: code = NotFound desc = could not find container \"a6b255507045ac1cb83988953c72089d5254633b5201f35434212b8aff70d6db\": container with ID starting with a6b255507045ac1cb83988953c72089d5254633b5201f35434212b8aff70d6db not found: ID does not exist" Nov 21 15:32:05 crc kubenswrapper[4774]: I1121 15:32:05.204921 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f2zwc"] Nov 21 15:32:05 crc kubenswrapper[4774]: I1121 15:32:05.213101 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-f2zwc"] Nov 21 15:32:06 crc kubenswrapper[4774]: I1121 15:32:06.110033 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" path="/var/lib/kubelet/pods/b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc/volumes" Nov 21 15:32:11 crc kubenswrapper[4774]: I1121 15:32:11.093259 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:32:11 crc kubenswrapper[4774]: E1121 15:32:11.093960 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:32:23 crc kubenswrapper[4774]: I1121 15:32:23.094484 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:32:23 crc kubenswrapper[4774]: E1121 15:32:23.095775 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:32:37 crc kubenswrapper[4774]: I1121 15:32:37.093423 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:32:37 crc kubenswrapper[4774]: E1121 15:32:37.094286 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:32:49 crc kubenswrapper[4774]: I1121 15:32:49.093256 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:32:49 crc kubenswrapper[4774]: E1121 15:32:49.093964 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.037214 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-fbwmf"] Nov 21 15:33:00 crc kubenswrapper[4774]: E1121 15:33:00.038610 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" containerName="extract-content" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.038643 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" containerName="extract-content" Nov 21 15:33:00 crc kubenswrapper[4774]: E1121 15:33:00.038685 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" containerName="registry-server" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.038701 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" containerName="registry-server" Nov 21 15:33:00 crc kubenswrapper[4774]: E1121 15:33:00.038752 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" containerName="extract-utilities" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.038769 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" containerName="extract-utilities" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.039181 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4ee62a5-a9bb-40dd-9ad5-7b21e0132ebc" containerName="registry-server" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.040423 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-fbwmf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.045854 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-fbwmf"] Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.089492 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmjcc\" (UniqueName: \"kubernetes.io/projected/a58f797a-f829-44c8-8339-90e553652d85-kube-api-access-nmjcc\") pod \"barbican-db-create-fbwmf\" (UID: \"a58f797a-f829-44c8-8339-90e553652d85\") " pod="openstack/barbican-db-create-fbwmf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.089879 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a58f797a-f829-44c8-8339-90e553652d85-operator-scripts\") pod \"barbican-db-create-fbwmf\" (UID: \"a58f797a-f829-44c8-8339-90e553652d85\") " pod="openstack/barbican-db-create-fbwmf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.130462 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-ca6d-account-create-dkgrf"] Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.131650 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-ca6d-account-create-dkgrf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.133997 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.140987 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-ca6d-account-create-dkgrf"] Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.192018 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmjcc\" (UniqueName: \"kubernetes.io/projected/a58f797a-f829-44c8-8339-90e553652d85-kube-api-access-nmjcc\") pod \"barbican-db-create-fbwmf\" (UID: \"a58f797a-f829-44c8-8339-90e553652d85\") " pod="openstack/barbican-db-create-fbwmf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.192101 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a58f797a-f829-44c8-8339-90e553652d85-operator-scripts\") pod \"barbican-db-create-fbwmf\" (UID: \"a58f797a-f829-44c8-8339-90e553652d85\") " pod="openstack/barbican-db-create-fbwmf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.192138 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d410396-7f33-4931-aea3-fc85b62814f0-operator-scripts\") pod \"barbican-ca6d-account-create-dkgrf\" (UID: \"6d410396-7f33-4931-aea3-fc85b62814f0\") " pod="openstack/barbican-ca6d-account-create-dkgrf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.192181 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t92zm\" (UniqueName: \"kubernetes.io/projected/6d410396-7f33-4931-aea3-fc85b62814f0-kube-api-access-t92zm\") pod \"barbican-ca6d-account-create-dkgrf\" (UID: \"6d410396-7f33-4931-aea3-fc85b62814f0\") " pod="openstack/barbican-ca6d-account-create-dkgrf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.193162 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/a58f797a-f829-44c8-8339-90e553652d85-operator-scripts\") pod \"barbican-db-create-fbwmf\" (UID: \"a58f797a-f829-44c8-8339-90e553652d85\") " pod="openstack/barbican-db-create-fbwmf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.212103 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmjcc\" (UniqueName: \"kubernetes.io/projected/a58f797a-f829-44c8-8339-90e553652d85-kube-api-access-nmjcc\") pod \"barbican-db-create-fbwmf\" (UID: \"a58f797a-f829-44c8-8339-90e553652d85\") " pod="openstack/barbican-db-create-fbwmf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.293869 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d410396-7f33-4931-aea3-fc85b62814f0-operator-scripts\") pod \"barbican-ca6d-account-create-dkgrf\" (UID: \"6d410396-7f33-4931-aea3-fc85b62814f0\") " pod="openstack/barbican-ca6d-account-create-dkgrf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.293932 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t92zm\" (UniqueName: \"kubernetes.io/projected/6d410396-7f33-4931-aea3-fc85b62814f0-kube-api-access-t92zm\") pod \"barbican-ca6d-account-create-dkgrf\" (UID: \"6d410396-7f33-4931-aea3-fc85b62814f0\") " pod="openstack/barbican-ca6d-account-create-dkgrf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.294869 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d410396-7f33-4931-aea3-fc85b62814f0-operator-scripts\") pod \"barbican-ca6d-account-create-dkgrf\" (UID: \"6d410396-7f33-4931-aea3-fc85b62814f0\") " pod="openstack/barbican-ca6d-account-create-dkgrf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.310640 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t92zm\" (UniqueName: \"kubernetes.io/projected/6d410396-7f33-4931-aea3-fc85b62814f0-kube-api-access-t92zm\") pod \"barbican-ca6d-account-create-dkgrf\" (UID: \"6d410396-7f33-4931-aea3-fc85b62814f0\") " pod="openstack/barbican-ca6d-account-create-dkgrf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.372622 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-fbwmf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.448762 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-ca6d-account-create-dkgrf" Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.794556 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-fbwmf"] Nov 21 15:33:00 crc kubenswrapper[4774]: I1121 15:33:00.902526 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-ca6d-account-create-dkgrf"] Nov 21 15:33:00 crc kubenswrapper[4774]: W1121 15:33:00.908739 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6d410396_7f33_4931_aea3_fc85b62814f0.slice/crio-a40bb880f3b9546d4dcf8578dd27f05715af1590b3d807dd48c0437ef910e9eb WatchSource:0}: Error finding container a40bb880f3b9546d4dcf8578dd27f05715af1590b3d807dd48c0437ef910e9eb: Status 404 returned error can't find the container with id a40bb880f3b9546d4dcf8578dd27f05715af1590b3d807dd48c0437ef910e9eb Nov 21 15:33:01 crc kubenswrapper[4774]: I1121 15:33:01.414857 4774 generic.go:334] "Generic (PLEG): container finished" podID="6d410396-7f33-4931-aea3-fc85b62814f0" containerID="dd2deea2401320b381a43022556e48db3a44b54ef1e4cc8fbc90a6b1081d9a32" exitCode=0 Nov 21 15:33:01 crc kubenswrapper[4774]: I1121 15:33:01.414925 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-ca6d-account-create-dkgrf" event={"ID":"6d410396-7f33-4931-aea3-fc85b62814f0","Type":"ContainerDied","Data":"dd2deea2401320b381a43022556e48db3a44b54ef1e4cc8fbc90a6b1081d9a32"} Nov 21 15:33:01 crc kubenswrapper[4774]: I1121 15:33:01.414954 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-ca6d-account-create-dkgrf" event={"ID":"6d410396-7f33-4931-aea3-fc85b62814f0","Type":"ContainerStarted","Data":"a40bb880f3b9546d4dcf8578dd27f05715af1590b3d807dd48c0437ef910e9eb"} Nov 21 15:33:01 crc kubenswrapper[4774]: I1121 15:33:01.418066 4774 generic.go:334] "Generic (PLEG): container finished" podID="a58f797a-f829-44c8-8339-90e553652d85" containerID="013a3eab950769e10a370cb2855c6cf818a27cac0ec6c87662c04a2ea1eac0da" exitCode=0 Nov 21 15:33:01 crc kubenswrapper[4774]: I1121 15:33:01.418150 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-fbwmf" event={"ID":"a58f797a-f829-44c8-8339-90e553652d85","Type":"ContainerDied","Data":"013a3eab950769e10a370cb2855c6cf818a27cac0ec6c87662c04a2ea1eac0da"} Nov 21 15:33:01 crc kubenswrapper[4774]: I1121 15:33:01.418210 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-fbwmf" event={"ID":"a58f797a-f829-44c8-8339-90e553652d85","Type":"ContainerStarted","Data":"343cb3beb830139cbd9c13f35b3a3147a2d13105061ab9ebfc78d8a6035f7d1c"} Nov 21 15:33:02 crc kubenswrapper[4774]: I1121 15:33:02.093368 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:33:02 crc kubenswrapper[4774]: E1121 15:33:02.094100 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:33:02 crc kubenswrapper[4774]: I1121 15:33:02.844177 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-fbwmf" Nov 21 15:33:02 crc kubenswrapper[4774]: I1121 15:33:02.849075 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-ca6d-account-create-dkgrf" Nov 21 15:33:02 crc kubenswrapper[4774]: I1121 15:33:02.940425 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d410396-7f33-4931-aea3-fc85b62814f0-operator-scripts\") pod \"6d410396-7f33-4931-aea3-fc85b62814f0\" (UID: \"6d410396-7f33-4931-aea3-fc85b62814f0\") " Nov 21 15:33:02 crc kubenswrapper[4774]: I1121 15:33:02.940510 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmjcc\" (UniqueName: \"kubernetes.io/projected/a58f797a-f829-44c8-8339-90e553652d85-kube-api-access-nmjcc\") pod \"a58f797a-f829-44c8-8339-90e553652d85\" (UID: \"a58f797a-f829-44c8-8339-90e553652d85\") " Nov 21 15:33:02 crc kubenswrapper[4774]: I1121 15:33:02.940554 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t92zm\" (UniqueName: \"kubernetes.io/projected/6d410396-7f33-4931-aea3-fc85b62814f0-kube-api-access-t92zm\") pod \"6d410396-7f33-4931-aea3-fc85b62814f0\" (UID: \"6d410396-7f33-4931-aea3-fc85b62814f0\") " Nov 21 15:33:02 crc kubenswrapper[4774]: I1121 15:33:02.940646 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a58f797a-f829-44c8-8339-90e553652d85-operator-scripts\") pod \"a58f797a-f829-44c8-8339-90e553652d85\" (UID: \"a58f797a-f829-44c8-8339-90e553652d85\") " Nov 21 15:33:02 crc kubenswrapper[4774]: I1121 15:33:02.941555 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a58f797a-f829-44c8-8339-90e553652d85-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a58f797a-f829-44c8-8339-90e553652d85" (UID: "a58f797a-f829-44c8-8339-90e553652d85"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:33:02 crc kubenswrapper[4774]: I1121 15:33:02.941576 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d410396-7f33-4931-aea3-fc85b62814f0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6d410396-7f33-4931-aea3-fc85b62814f0" (UID: "6d410396-7f33-4931-aea3-fc85b62814f0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:33:02 crc kubenswrapper[4774]: I1121 15:33:02.948943 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d410396-7f33-4931-aea3-fc85b62814f0-kube-api-access-t92zm" (OuterVolumeSpecName: "kube-api-access-t92zm") pod "6d410396-7f33-4931-aea3-fc85b62814f0" (UID: "6d410396-7f33-4931-aea3-fc85b62814f0"). InnerVolumeSpecName "kube-api-access-t92zm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:33:02 crc kubenswrapper[4774]: I1121 15:33:02.955179 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a58f797a-f829-44c8-8339-90e553652d85-kube-api-access-nmjcc" (OuterVolumeSpecName: "kube-api-access-nmjcc") pod "a58f797a-f829-44c8-8339-90e553652d85" (UID: "a58f797a-f829-44c8-8339-90e553652d85"). InnerVolumeSpecName "kube-api-access-nmjcc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:33:03 crc kubenswrapper[4774]: I1121 15:33:03.042757 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d410396-7f33-4931-aea3-fc85b62814f0-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:03 crc kubenswrapper[4774]: I1121 15:33:03.042812 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmjcc\" (UniqueName: \"kubernetes.io/projected/a58f797a-f829-44c8-8339-90e553652d85-kube-api-access-nmjcc\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:03 crc kubenswrapper[4774]: I1121 15:33:03.042864 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t92zm\" (UniqueName: \"kubernetes.io/projected/6d410396-7f33-4931-aea3-fc85b62814f0-kube-api-access-t92zm\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:03 crc kubenswrapper[4774]: I1121 15:33:03.042877 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a58f797a-f829-44c8-8339-90e553652d85-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:03 crc kubenswrapper[4774]: I1121 15:33:03.438440 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-ca6d-account-create-dkgrf" event={"ID":"6d410396-7f33-4931-aea3-fc85b62814f0","Type":"ContainerDied","Data":"a40bb880f3b9546d4dcf8578dd27f05715af1590b3d807dd48c0437ef910e9eb"} Nov 21 15:33:03 crc kubenswrapper[4774]: I1121 15:33:03.438713 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a40bb880f3b9546d4dcf8578dd27f05715af1590b3d807dd48c0437ef910e9eb" Nov 21 15:33:03 crc kubenswrapper[4774]: I1121 15:33:03.438460 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-ca6d-account-create-dkgrf" Nov 21 15:33:03 crc kubenswrapper[4774]: I1121 15:33:03.440387 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-fbwmf" event={"ID":"a58f797a-f829-44c8-8339-90e553652d85","Type":"ContainerDied","Data":"343cb3beb830139cbd9c13f35b3a3147a2d13105061ab9ebfc78d8a6035f7d1c"} Nov 21 15:33:03 crc kubenswrapper[4774]: I1121 15:33:03.440454 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-fbwmf" Nov 21 15:33:03 crc kubenswrapper[4774]: I1121 15:33:03.440477 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="343cb3beb830139cbd9c13f35b3a3147a2d13105061ab9ebfc78d8a6035f7d1c" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.479789 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-5ttt7"] Nov 21 15:33:05 crc kubenswrapper[4774]: E1121 15:33:05.480317 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a58f797a-f829-44c8-8339-90e553652d85" containerName="mariadb-database-create" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.480328 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a58f797a-f829-44c8-8339-90e553652d85" containerName="mariadb-database-create" Nov 21 15:33:05 crc kubenswrapper[4774]: E1121 15:33:05.480341 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d410396-7f33-4931-aea3-fc85b62814f0" containerName="mariadb-account-create" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.480347 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d410396-7f33-4931-aea3-fc85b62814f0" containerName="mariadb-account-create" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.480505 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d410396-7f33-4931-aea3-fc85b62814f0" containerName="mariadb-account-create" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.480515 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a58f797a-f829-44c8-8339-90e553652d85" containerName="mariadb-database-create" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.481046 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.483804 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-p9h2c" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.483847 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.533933 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-5ttt7"] Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.585224 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6fd77432-caee-4eca-a8b0-f10e61ba68c5-db-sync-config-data\") pod \"barbican-db-sync-5ttt7\" (UID: \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\") " pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.585300 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd77432-caee-4eca-a8b0-f10e61ba68c5-combined-ca-bundle\") pod \"barbican-db-sync-5ttt7\" (UID: \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\") " pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.585476 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcls9\" (UniqueName: \"kubernetes.io/projected/6fd77432-caee-4eca-a8b0-f10e61ba68c5-kube-api-access-xcls9\") pod \"barbican-db-sync-5ttt7\" (UID: \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\") " pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.686909 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6fd77432-caee-4eca-a8b0-f10e61ba68c5-db-sync-config-data\") pod \"barbican-db-sync-5ttt7\" (UID: \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\") " pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.687253 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd77432-caee-4eca-a8b0-f10e61ba68c5-combined-ca-bundle\") pod \"barbican-db-sync-5ttt7\" (UID: \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\") " pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.687301 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcls9\" (UniqueName: \"kubernetes.io/projected/6fd77432-caee-4eca-a8b0-f10e61ba68c5-kube-api-access-xcls9\") pod \"barbican-db-sync-5ttt7\" (UID: \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\") " pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.696692 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6fd77432-caee-4eca-a8b0-f10e61ba68c5-db-sync-config-data\") pod \"barbican-db-sync-5ttt7\" (UID: \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\") " pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.697072 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6fd77432-caee-4eca-a8b0-f10e61ba68c5-combined-ca-bundle\") pod \"barbican-db-sync-5ttt7\" (UID: \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\") " pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.707972 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcls9\" (UniqueName: \"kubernetes.io/projected/6fd77432-caee-4eca-a8b0-f10e61ba68c5-kube-api-access-xcls9\") pod \"barbican-db-sync-5ttt7\" (UID: \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\") " pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:05 crc kubenswrapper[4774]: I1121 15:33:05.798850 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:06 crc kubenswrapper[4774]: I1121 15:33:06.269713 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-5ttt7"] Nov 21 15:33:06 crc kubenswrapper[4774]: I1121 15:33:06.471367 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5ttt7" event={"ID":"6fd77432-caee-4eca-a8b0-f10e61ba68c5","Type":"ContainerStarted","Data":"e82c98831c5e5fc913b361d4338f58a9eb3b6b86586401fd570a0ce46cd3c2ab"} Nov 21 15:33:06 crc kubenswrapper[4774]: I1121 15:33:06.471414 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5ttt7" event={"ID":"6fd77432-caee-4eca-a8b0-f10e61ba68c5","Type":"ContainerStarted","Data":"3f52287daa88909537f858c6261b7cba1c638af525e379d0969780047379da04"} Nov 21 15:33:06 crc kubenswrapper[4774]: I1121 15:33:06.484436 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-5ttt7" podStartSLOduration=1.4844175769999999 podStartE2EDuration="1.484417577s" podCreationTimestamp="2025-11-21 15:33:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:33:06.483567603 +0000 UTC m=+5377.135766862" watchObservedRunningTime="2025-11-21 15:33:06.484417577 +0000 UTC m=+5377.136616836" Nov 21 15:33:08 crc kubenswrapper[4774]: I1121 15:33:08.491229 4774 generic.go:334] "Generic (PLEG): container finished" podID="6fd77432-caee-4eca-a8b0-f10e61ba68c5" containerID="e82c98831c5e5fc913b361d4338f58a9eb3b6b86586401fd570a0ce46cd3c2ab" exitCode=0 Nov 21 15:33:08 crc kubenswrapper[4774]: I1121 15:33:08.491340 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5ttt7" event={"ID":"6fd77432-caee-4eca-a8b0-f10e61ba68c5","Type":"ContainerDied","Data":"e82c98831c5e5fc913b361d4338f58a9eb3b6b86586401fd570a0ce46cd3c2ab"} Nov 21 15:33:09 crc kubenswrapper[4774]: I1121 15:33:09.855117 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:09 crc kubenswrapper[4774]: I1121 15:33:09.963135 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd77432-caee-4eca-a8b0-f10e61ba68c5-combined-ca-bundle\") pod \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\" (UID: \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\") " Nov 21 15:33:09 crc kubenswrapper[4774]: I1121 15:33:09.963332 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6fd77432-caee-4eca-a8b0-f10e61ba68c5-db-sync-config-data\") pod \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\" (UID: \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\") " Nov 21 15:33:09 crc kubenswrapper[4774]: I1121 15:33:09.963548 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcls9\" (UniqueName: \"kubernetes.io/projected/6fd77432-caee-4eca-a8b0-f10e61ba68c5-kube-api-access-xcls9\") pod \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\" (UID: \"6fd77432-caee-4eca-a8b0-f10e61ba68c5\") " Nov 21 15:33:09 crc kubenswrapper[4774]: I1121 15:33:09.969860 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fd77432-caee-4eca-a8b0-f10e61ba68c5-kube-api-access-xcls9" (OuterVolumeSpecName: "kube-api-access-xcls9") pod "6fd77432-caee-4eca-a8b0-f10e61ba68c5" (UID: "6fd77432-caee-4eca-a8b0-f10e61ba68c5"). InnerVolumeSpecName "kube-api-access-xcls9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:33:09 crc kubenswrapper[4774]: I1121 15:33:09.976144 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fd77432-caee-4eca-a8b0-f10e61ba68c5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "6fd77432-caee-4eca-a8b0-f10e61ba68c5" (UID: "6fd77432-caee-4eca-a8b0-f10e61ba68c5"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:33:09 crc kubenswrapper[4774]: I1121 15:33:09.989654 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fd77432-caee-4eca-a8b0-f10e61ba68c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6fd77432-caee-4eca-a8b0-f10e61ba68c5" (UID: "6fd77432-caee-4eca-a8b0-f10e61ba68c5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.065718 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcls9\" (UniqueName: \"kubernetes.io/projected/6fd77432-caee-4eca-a8b0-f10e61ba68c5-kube-api-access-xcls9\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.066014 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fd77432-caee-4eca-a8b0-f10e61ba68c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.066093 4774 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6fd77432-caee-4eca-a8b0-f10e61ba68c5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.512764 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-5ttt7" event={"ID":"6fd77432-caee-4eca-a8b0-f10e61ba68c5","Type":"ContainerDied","Data":"3f52287daa88909537f858c6261b7cba1c638af525e379d0969780047379da04"} Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.512808 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f52287daa88909537f858c6261b7cba1c638af525e379d0969780047379da04" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.512811 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-5ttt7" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.747625 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5bb856d459-sqtcr"] Nov 21 15:33:10 crc kubenswrapper[4774]: E1121 15:33:10.748055 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fd77432-caee-4eca-a8b0-f10e61ba68c5" containerName="barbican-db-sync" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.748075 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fd77432-caee-4eca-a8b0-f10e61ba68c5" containerName="barbican-db-sync" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.748260 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fd77432-caee-4eca-a8b0-f10e61ba68c5" containerName="barbican-db-sync" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.749200 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.758483 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.758483 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-p9h2c" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.758646 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.762865 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-65689d7cb-qmp65"] Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.764570 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.768285 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.788942 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5bb856d459-sqtcr"] Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.795145 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca28a1b6-e307-463d-af12-65024dddb2a7-logs\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.795476 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca28a1b6-e307-463d-af12-65024dddb2a7-config-data\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.795607 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca28a1b6-e307-463d-af12-65024dddb2a7-config-data-custom\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.795771 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh6nk\" (UniqueName: \"kubernetes.io/projected/ca28a1b6-e307-463d-af12-65024dddb2a7-kube-api-access-fh6nk\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.796014 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca28a1b6-e307-463d-af12-65024dddb2a7-combined-ca-bundle\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.812583 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-65689d7cb-qmp65"] Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.889065 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-578b474d4c-5c9bt"] Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.892139 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.898851 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca28a1b6-e307-463d-af12-65024dddb2a7-logs\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.898894 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca28a1b6-e307-463d-af12-65024dddb2a7-config-data\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.898920 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca28a1b6-e307-463d-af12-65024dddb2a7-config-data-custom\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.898946 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh6nk\" (UniqueName: \"kubernetes.io/projected/ca28a1b6-e307-463d-af12-65024dddb2a7-kube-api-access-fh6nk\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.898969 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08f110f4-3615-41c7-8954-f450c651fe05-combined-ca-bundle\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.898989 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9tsr\" (UniqueName: \"kubernetes.io/projected/08f110f4-3615-41c7-8954-f450c651fe05-kube-api-access-x9tsr\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.899017 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca28a1b6-e307-463d-af12-65024dddb2a7-combined-ca-bundle\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.899037 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08f110f4-3615-41c7-8954-f450c651fe05-logs\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.899060 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/08f110f4-3615-41c7-8954-f450c651fe05-config-data\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.899112 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08f110f4-3615-41c7-8954-f450c651fe05-config-data-custom\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.899590 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ca28a1b6-e307-463d-af12-65024dddb2a7-logs\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.906257 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca28a1b6-e307-463d-af12-65024dddb2a7-config-data\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.906650 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca28a1b6-e307-463d-af12-65024dddb2a7-combined-ca-bundle\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.913129 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578b474d4c-5c9bt"] Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.927733 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca28a1b6-e307-463d-af12-65024dddb2a7-config-data-custom\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.933575 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh6nk\" (UniqueName: \"kubernetes.io/projected/ca28a1b6-e307-463d-af12-65024dddb2a7-kube-api-access-fh6nk\") pod \"barbican-worker-5bb856d459-sqtcr\" (UID: \"ca28a1b6-e307-463d-af12-65024dddb2a7\") " pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.955359 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-55dd675f88-stgkz"] Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.956863 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.961579 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Nov 21 15:33:10 crc kubenswrapper[4774]: I1121 15:33:10.962152 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-55dd675f88-stgkz"] Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.000241 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-config\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.000293 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-combined-ca-bundle\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.000401 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vvv4\" (UniqueName: \"kubernetes.io/projected/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-kube-api-access-4vvv4\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.000439 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-logs\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.000545 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08f110f4-3615-41c7-8954-f450c651fe05-combined-ca-bundle\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.000582 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9tsr\" (UniqueName: \"kubernetes.io/projected/08f110f4-3615-41c7-8954-f450c651fe05-kube-api-access-x9tsr\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.001100 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-config-data\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.001149 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08f110f4-3615-41c7-8954-f450c651fe05-logs\") pod 
\"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.001188 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08f110f4-3615-41c7-8954-f450c651fe05-config-data\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.001236 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-ovsdbserver-sb\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.001325 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-dns-svc\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.001410 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08f110f4-3615-41c7-8954-f450c651fe05-config-data-custom\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.001461 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-ovsdbserver-nb\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.001508 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vttd7\" (UniqueName: \"kubernetes.io/projected/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-kube-api-access-vttd7\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.001526 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08f110f4-3615-41c7-8954-f450c651fe05-logs\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.001582 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-config-data-custom\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.005932 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08f110f4-3615-41c7-8954-f450c651fe05-combined-ca-bundle\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.006280 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08f110f4-3615-41c7-8954-f450c651fe05-config-data-custom\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.006938 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08f110f4-3615-41c7-8954-f450c651fe05-config-data\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.019667 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9tsr\" (UniqueName: \"kubernetes.io/projected/08f110f4-3615-41c7-8954-f450c651fe05-kube-api-access-x9tsr\") pod \"barbican-keystone-listener-65689d7cb-qmp65\" (UID: \"08f110f4-3615-41c7-8954-f450c651fe05\") " pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.074627 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5bb856d459-sqtcr" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.097563 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.103253 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-ovsdbserver-nb\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.103331 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vttd7\" (UniqueName: \"kubernetes.io/projected/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-kube-api-access-vttd7\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.103358 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-config-data-custom\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.103388 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-config\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.103406 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-combined-ca-bundle\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.103428 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vvv4\" (UniqueName: \"kubernetes.io/projected/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-kube-api-access-4vvv4\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.103443 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-logs\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.103490 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-config-data\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.103514 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-ovsdbserver-sb\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " 
pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.103544 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-dns-svc\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.104681 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-dns-svc\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.105366 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-ovsdbserver-nb\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.105703 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-logs\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.105712 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-config\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.106077 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-ovsdbserver-sb\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.108776 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-combined-ca-bundle\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.110107 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-config-data-custom\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.123571 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-config-data\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.126102 4774 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-vttd7\" (UniqueName: \"kubernetes.io/projected/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-kube-api-access-vttd7\") pod \"dnsmasq-dns-578b474d4c-5c9bt\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") " pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.126485 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vvv4\" (UniqueName: \"kubernetes.io/projected/5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3-kube-api-access-4vvv4\") pod \"barbican-api-55dd675f88-stgkz\" (UID: \"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3\") " pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.301137 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.368211 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.590917 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5bb856d459-sqtcr"] Nov 21 15:33:11 crc kubenswrapper[4774]: W1121 15:33:11.598700 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca28a1b6_e307_463d_af12_65024dddb2a7.slice/crio-d2840d35af1ef41a0361eba4847f7730a767ed7a1d09ddb35d0832fec331365e WatchSource:0}: Error finding container d2840d35af1ef41a0361eba4847f7730a767ed7a1d09ddb35d0832fec331365e: Status 404 returned error can't find the container with id d2840d35af1ef41a0361eba4847f7730a767ed7a1d09ddb35d0832fec331365e Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.642119 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-65689d7cb-qmp65"] Nov 21 15:33:11 crc kubenswrapper[4774]: W1121 15:33:11.651490 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08f110f4_3615_41c7_8954_f450c651fe05.slice/crio-c0b4ab416941ef32e3e401d63ccc48e719b36df095598bbf1344d10cb729e261 WatchSource:0}: Error finding container c0b4ab416941ef32e3e401d63ccc48e719b36df095598bbf1344d10cb729e261: Status 404 returned error can't find the container with id c0b4ab416941ef32e3e401d63ccc48e719b36df095598bbf1344d10cb729e261 Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.837355 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578b474d4c-5c9bt"] Nov 21 15:33:11 crc kubenswrapper[4774]: W1121 15:33:11.840583 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5fe8ce53_fc65_4c16_821e_fa1467f6bf30.slice/crio-92589b45bc273f56e3f89771302f156ccb2a0f28dab0fb0e8b79379552139a18 WatchSource:0}: Error finding container 92589b45bc273f56e3f89771302f156ccb2a0f28dab0fb0e8b79379552139a18: Status 404 returned error can't find the container with id 92589b45bc273f56e3f89771302f156ccb2a0f28dab0fb0e8b79379552139a18 Nov 21 15:33:11 crc kubenswrapper[4774]: I1121 15:33:11.935909 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-55dd675f88-stgkz"] Nov 21 15:33:11 crc kubenswrapper[4774]: W1121 15:33:11.938880 4774 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b6d8e18_2f4f_40fd_a29f_4ab576d68ea3.slice/crio-e416b41c2118543653feb4556f920ec6d3e53f4f93783f28090e6e8cbe7491ed WatchSource:0}: Error finding container e416b41c2118543653feb4556f920ec6d3e53f4f93783f28090e6e8cbe7491ed: Status 404 returned error can't find the container with id e416b41c2118543653feb4556f920ec6d3e53f4f93783f28090e6e8cbe7491ed Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.567537 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-55dd675f88-stgkz" event={"ID":"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3","Type":"ContainerStarted","Data":"1ebd0bd9ea484077ce727b54ae163da75dd7ef18279b79287ae009e928c091ce"} Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.567877 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-55dd675f88-stgkz" event={"ID":"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3","Type":"ContainerStarted","Data":"88fd8d25f8effee8f506bc2782b77a0458aeda730b6c5bd116fc2a580a3a84c0"} Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.567894 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-55dd675f88-stgkz" event={"ID":"5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3","Type":"ContainerStarted","Data":"e416b41c2118543653feb4556f920ec6d3e53f4f93783f28090e6e8cbe7491ed"} Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.568339 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.569619 4774 generic.go:334] "Generic (PLEG): container finished" podID="5fe8ce53-fc65-4c16-821e-fa1467f6bf30" containerID="e297a837e5e94299d9704b18eebc5c3a74c3058ac7c711345c65ed0d252fcec2" exitCode=0 Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.569694 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" event={"ID":"5fe8ce53-fc65-4c16-821e-fa1467f6bf30","Type":"ContainerDied","Data":"e297a837e5e94299d9704b18eebc5c3a74c3058ac7c711345c65ed0d252fcec2"} Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.569715 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" event={"ID":"5fe8ce53-fc65-4c16-821e-fa1467f6bf30","Type":"ContainerStarted","Data":"92589b45bc273f56e3f89771302f156ccb2a0f28dab0fb0e8b79379552139a18"} Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.574387 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" event={"ID":"08f110f4-3615-41c7-8954-f450c651fe05","Type":"ContainerStarted","Data":"0aa4ca9f32273715561691087b42456cea3f316fa484ccd39f2e56b1419304d7"} Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.574437 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" event={"ID":"08f110f4-3615-41c7-8954-f450c651fe05","Type":"ContainerStarted","Data":"e64a3b1eb26002d922f6a9cc5604a3eaa24627d599ebf967349001eb791eecfb"} Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.574451 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" event={"ID":"08f110f4-3615-41c7-8954-f450c651fe05","Type":"ContainerStarted","Data":"c0b4ab416941ef32e3e401d63ccc48e719b36df095598bbf1344d10cb729e261"} Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.580643 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-worker-5bb856d459-sqtcr" event={"ID":"ca28a1b6-e307-463d-af12-65024dddb2a7","Type":"ContainerStarted","Data":"f382933ec9505a0282ec6fedf8bdcaa9f5c2b501c8ff822c93f414896f868557"} Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.580973 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5bb856d459-sqtcr" event={"ID":"ca28a1b6-e307-463d-af12-65024dddb2a7","Type":"ContainerStarted","Data":"82d4cf163a08ad9b7a3e4d3a263f577b2ba933571c32c47e6cf5f6e40fc4ed4c"} Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.585608 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5bb856d459-sqtcr" event={"ID":"ca28a1b6-e307-463d-af12-65024dddb2a7","Type":"ContainerStarted","Data":"d2840d35af1ef41a0361eba4847f7730a767ed7a1d09ddb35d0832fec331365e"} Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.617060 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-55dd675f88-stgkz" podStartSLOduration=2.617033917 podStartE2EDuration="2.617033917s" podCreationTimestamp="2025-11-21 15:33:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:33:12.594213997 +0000 UTC m=+5383.246413256" watchObservedRunningTime="2025-11-21 15:33:12.617033917 +0000 UTC m=+5383.269233196" Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.629992 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-65689d7cb-qmp65" podStartSLOduration=2.6299754760000003 podStartE2EDuration="2.629975476s" podCreationTimestamp="2025-11-21 15:33:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:33:12.622276947 +0000 UTC m=+5383.274476206" watchObservedRunningTime="2025-11-21 15:33:12.629975476 +0000 UTC m=+5383.282174735" Nov 21 15:33:12 crc kubenswrapper[4774]: I1121 15:33:12.673669 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5bb856d459-sqtcr" podStartSLOduration=2.673652111 podStartE2EDuration="2.673652111s" podCreationTimestamp="2025-11-21 15:33:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:33:12.665144329 +0000 UTC m=+5383.317343588" watchObservedRunningTime="2025-11-21 15:33:12.673652111 +0000 UTC m=+5383.325851360" Nov 21 15:33:13 crc kubenswrapper[4774]: I1121 15:33:13.093022 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:33:13 crc kubenswrapper[4774]: E1121 15:33:13.093254 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:33:13 crc kubenswrapper[4774]: I1121 15:33:13.616312 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" 
event={"ID":"5fe8ce53-fc65-4c16-821e-fa1467f6bf30","Type":"ContainerStarted","Data":"b0ff764da328ba11b7ef5f7d205475a9215fb64c167264937a5946b4631a2590"} Nov 21 15:33:13 crc kubenswrapper[4774]: I1121 15:33:13.616577 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:13 crc kubenswrapper[4774]: I1121 15:33:13.618302 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:13 crc kubenswrapper[4774]: I1121 15:33:13.639280 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" podStartSLOduration=3.6392647670000002 podStartE2EDuration="3.639264767s" podCreationTimestamp="2025-11-21 15:33:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:33:13.636478557 +0000 UTC m=+5384.288677826" watchObservedRunningTime="2025-11-21 15:33:13.639264767 +0000 UTC m=+5384.291464026" Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.303314 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.363523 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b64df9dbc-s2plv"] Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.363955 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" podUID="1dfeeb56-c04c-43e1-9572-031dc79efd56" containerName="dnsmasq-dns" containerID="cri-o://727e9b29a7e95e848261c8c8fcd8a786c9c58723037e547997d13a7acfc20bb1" gracePeriod=10 Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.726861 4774 generic.go:334] "Generic (PLEG): container finished" podID="1dfeeb56-c04c-43e1-9572-031dc79efd56" containerID="727e9b29a7e95e848261c8c8fcd8a786c9c58723037e547997d13a7acfc20bb1" exitCode=0 Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.726907 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" event={"ID":"1dfeeb56-c04c-43e1-9572-031dc79efd56","Type":"ContainerDied","Data":"727e9b29a7e95e848261c8c8fcd8a786c9c58723037e547997d13a7acfc20bb1"} Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.860721 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.899831 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-config\") pod \"1dfeeb56-c04c-43e1-9572-031dc79efd56\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.899899 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-ovsdbserver-sb\") pod \"1dfeeb56-c04c-43e1-9572-031dc79efd56\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.900069 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-ovsdbserver-nb\") pod \"1dfeeb56-c04c-43e1-9572-031dc79efd56\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.900102 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlw7j\" (UniqueName: \"kubernetes.io/projected/1dfeeb56-c04c-43e1-9572-031dc79efd56-kube-api-access-zlw7j\") pod \"1dfeeb56-c04c-43e1-9572-031dc79efd56\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.900177 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-dns-svc\") pod \"1dfeeb56-c04c-43e1-9572-031dc79efd56\" (UID: \"1dfeeb56-c04c-43e1-9572-031dc79efd56\") " Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.916013 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1dfeeb56-c04c-43e1-9572-031dc79efd56-kube-api-access-zlw7j" (OuterVolumeSpecName: "kube-api-access-zlw7j") pod "1dfeeb56-c04c-43e1-9572-031dc79efd56" (UID: "1dfeeb56-c04c-43e1-9572-031dc79efd56"). InnerVolumeSpecName "kube-api-access-zlw7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.949668 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1dfeeb56-c04c-43e1-9572-031dc79efd56" (UID: "1dfeeb56-c04c-43e1-9572-031dc79efd56"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.964339 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1dfeeb56-c04c-43e1-9572-031dc79efd56" (UID: "1dfeeb56-c04c-43e1-9572-031dc79efd56"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.967530 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-config" (OuterVolumeSpecName: "config") pod "1dfeeb56-c04c-43e1-9572-031dc79efd56" (UID: "1dfeeb56-c04c-43e1-9572-031dc79efd56"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:33:21 crc kubenswrapper[4774]: I1121 15:33:21.975088 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1dfeeb56-c04c-43e1-9572-031dc79efd56" (UID: "1dfeeb56-c04c-43e1-9572-031dc79efd56"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.001943 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.001988 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.002000 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.002011 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlw7j\" (UniqueName: \"kubernetes.io/projected/1dfeeb56-c04c-43e1-9572-031dc79efd56-kube-api-access-zlw7j\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.002021 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1dfeeb56-c04c-43e1-9572-031dc79efd56-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.737502 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" event={"ID":"1dfeeb56-c04c-43e1-9572-031dc79efd56","Type":"ContainerDied","Data":"98859c4979de4e63bed2a97012b468c9f43e8e62665cda2f521931cd276c82a2"} Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.737569 4774 scope.go:117] "RemoveContainer" containerID="727e9b29a7e95e848261c8c8fcd8a786c9c58723037e547997d13a7acfc20bb1" Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.738448 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b64df9dbc-s2plv" Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.764368 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b64df9dbc-s2plv"] Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.765255 4774 scope.go:117] "RemoveContainer" containerID="354b9fbcfee59a3bfbea5492448476e669a565134d24b5c3da5566b95eee05b3" Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.770648 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b64df9dbc-s2plv"] Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.831900 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:22 crc kubenswrapper[4774]: I1121 15:33:22.978639 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-55dd675f88-stgkz" Nov 21 15:33:24 crc kubenswrapper[4774]: I1121 15:33:24.103420 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1dfeeb56-c04c-43e1-9572-031dc79efd56" path="/var/lib/kubelet/pods/1dfeeb56-c04c-43e1-9572-031dc79efd56/volumes" Nov 21 15:33:26 crc kubenswrapper[4774]: I1121 15:33:26.093018 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:33:26 crc kubenswrapper[4774]: E1121 15:33:26.093475 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:33:32 crc kubenswrapper[4774]: I1121 15:33:32.827358 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-z5bzr"] Nov 21 15:33:32 crc kubenswrapper[4774]: E1121 15:33:32.828148 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dfeeb56-c04c-43e1-9572-031dc79efd56" containerName="dnsmasq-dns" Nov 21 15:33:32 crc kubenswrapper[4774]: I1121 15:33:32.828162 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dfeeb56-c04c-43e1-9572-031dc79efd56" containerName="dnsmasq-dns" Nov 21 15:33:32 crc kubenswrapper[4774]: E1121 15:33:32.828177 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1dfeeb56-c04c-43e1-9572-031dc79efd56" containerName="init" Nov 21 15:33:32 crc kubenswrapper[4774]: I1121 15:33:32.828185 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1dfeeb56-c04c-43e1-9572-031dc79efd56" containerName="init" Nov 21 15:33:32 crc kubenswrapper[4774]: I1121 15:33:32.828369 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="1dfeeb56-c04c-43e1-9572-031dc79efd56" containerName="dnsmasq-dns" Nov 21 15:33:32 crc kubenswrapper[4774]: I1121 15:33:32.828921 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-z5bzr" Nov 21 15:33:32 crc kubenswrapper[4774]: I1121 15:33:32.838601 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-z5bzr"] Nov 21 15:33:32 crc kubenswrapper[4774]: I1121 15:33:32.928412 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-711e-account-create-8s4fd"] Nov 21 15:33:32 crc kubenswrapper[4774]: I1121 15:33:32.929715 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-711e-account-create-8s4fd" Nov 21 15:33:32 crc kubenswrapper[4774]: I1121 15:33:32.932728 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Nov 21 15:33:32 crc kubenswrapper[4774]: I1121 15:33:32.935775 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-711e-account-create-8s4fd"] Nov 21 15:33:32 crc kubenswrapper[4774]: I1121 15:33:32.989950 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv528\" (UniqueName: \"kubernetes.io/projected/7e295507-fc0d-4844-97a7-7121be4456ad-kube-api-access-qv528\") pod \"neutron-db-create-z5bzr\" (UID: \"7e295507-fc0d-4844-97a7-7121be4456ad\") " pod="openstack/neutron-db-create-z5bzr" Nov 21 15:33:32 crc kubenswrapper[4774]: I1121 15:33:32.990304 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e295507-fc0d-4844-97a7-7121be4456ad-operator-scripts\") pod \"neutron-db-create-z5bzr\" (UID: \"7e295507-fc0d-4844-97a7-7121be4456ad\") " pod="openstack/neutron-db-create-z5bzr" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.092030 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv528\" (UniqueName: \"kubernetes.io/projected/7e295507-fc0d-4844-97a7-7121be4456ad-kube-api-access-qv528\") pod \"neutron-db-create-z5bzr\" (UID: \"7e295507-fc0d-4844-97a7-7121be4456ad\") " pod="openstack/neutron-db-create-z5bzr" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.092153 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e295507-fc0d-4844-97a7-7121be4456ad-operator-scripts\") pod \"neutron-db-create-z5bzr\" (UID: \"7e295507-fc0d-4844-97a7-7121be4456ad\") " pod="openstack/neutron-db-create-z5bzr" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.092234 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btm7j\" (UniqueName: \"kubernetes.io/projected/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835-kube-api-access-btm7j\") pod \"neutron-711e-account-create-8s4fd\" (UID: \"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835\") " pod="openstack/neutron-711e-account-create-8s4fd" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.092276 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835-operator-scripts\") pod \"neutron-711e-account-create-8s4fd\" (UID: \"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835\") " pod="openstack/neutron-711e-account-create-8s4fd" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.093137 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/7e295507-fc0d-4844-97a7-7121be4456ad-operator-scripts\") pod \"neutron-db-create-z5bzr\" (UID: \"7e295507-fc0d-4844-97a7-7121be4456ad\") " pod="openstack/neutron-db-create-z5bzr" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.115898 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv528\" (UniqueName: \"kubernetes.io/projected/7e295507-fc0d-4844-97a7-7121be4456ad-kube-api-access-qv528\") pod \"neutron-db-create-z5bzr\" (UID: \"7e295507-fc0d-4844-97a7-7121be4456ad\") " pod="openstack/neutron-db-create-z5bzr" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.158682 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-z5bzr" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.193796 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btm7j\" (UniqueName: \"kubernetes.io/projected/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835-kube-api-access-btm7j\") pod \"neutron-711e-account-create-8s4fd\" (UID: \"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835\") " pod="openstack/neutron-711e-account-create-8s4fd" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.194327 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835-operator-scripts\") pod \"neutron-711e-account-create-8s4fd\" (UID: \"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835\") " pod="openstack/neutron-711e-account-create-8s4fd" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.195007 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835-operator-scripts\") pod \"neutron-711e-account-create-8s4fd\" (UID: \"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835\") " pod="openstack/neutron-711e-account-create-8s4fd" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.216925 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btm7j\" (UniqueName: \"kubernetes.io/projected/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835-kube-api-access-btm7j\") pod \"neutron-711e-account-create-8s4fd\" (UID: \"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835\") " pod="openstack/neutron-711e-account-create-8s4fd" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.246972 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-711e-account-create-8s4fd" Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.609955 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-z5bzr"] Nov 21 15:33:33 crc kubenswrapper[4774]: W1121 15:33:33.611363 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e295507_fc0d_4844_97a7_7121be4456ad.slice/crio-b470e6d30aeec579bf60f92459babd5bae7b790a435f73443d476d15a17600b1 WatchSource:0}: Error finding container b470e6d30aeec579bf60f92459babd5bae7b790a435f73443d476d15a17600b1: Status 404 returned error can't find the container with id b470e6d30aeec579bf60f92459babd5bae7b790a435f73443d476d15a17600b1 Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.718185 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-711e-account-create-8s4fd"] Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.820903 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-711e-account-create-8s4fd" event={"ID":"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835","Type":"ContainerStarted","Data":"cf74e31cee0cc957c8df1778432d1b93eada4aa283eabf6b643bbdd288811e41"} Nov 21 15:33:33 crc kubenswrapper[4774]: I1121 15:33:33.822207 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z5bzr" event={"ID":"7e295507-fc0d-4844-97a7-7121be4456ad","Type":"ContainerStarted","Data":"b470e6d30aeec579bf60f92459babd5bae7b790a435f73443d476d15a17600b1"} Nov 21 15:33:34 crc kubenswrapper[4774]: I1121 15:33:34.777270 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-m78sd"] Nov 21 15:33:34 crc kubenswrapper[4774]: I1121 15:33:34.780442 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:34 crc kubenswrapper[4774]: I1121 15:33:34.812339 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m78sd"] Nov 21 15:33:34 crc kubenswrapper[4774]: I1121 15:33:34.833951 4774 generic.go:334] "Generic (PLEG): container finished" podID="a73a54d7-8b04-46ea-ad8c-a4b4f6d57835" containerID="9194ad9be5dc1830c57b82a75343406a75d24d9071113ce5ceb99061b778b111" exitCode=0 Nov 21 15:33:34 crc kubenswrapper[4774]: I1121 15:33:34.834296 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-711e-account-create-8s4fd" event={"ID":"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835","Type":"ContainerDied","Data":"9194ad9be5dc1830c57b82a75343406a75d24d9071113ce5ceb99061b778b111"} Nov 21 15:33:34 crc kubenswrapper[4774]: I1121 15:33:34.839191 4774 generic.go:334] "Generic (PLEG): container finished" podID="7e295507-fc0d-4844-97a7-7121be4456ad" containerID="86c679eb0f2aa8c651097aba371f01631f91789d72589c40973f19deb9f7592d" exitCode=0 Nov 21 15:33:34 crc kubenswrapper[4774]: I1121 15:33:34.839344 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z5bzr" event={"ID":"7e295507-fc0d-4844-97a7-7121be4456ad","Type":"ContainerDied","Data":"86c679eb0f2aa8c651097aba371f01631f91789d72589c40973f19deb9f7592d"} Nov 21 15:33:34 crc kubenswrapper[4774]: I1121 15:33:34.923571 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-catalog-content\") pod \"community-operators-m78sd\" (UID: \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\") " pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:34 crc kubenswrapper[4774]: I1121 15:33:34.923624 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-utilities\") pod \"community-operators-m78sd\" (UID: \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\") " pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:34 crc kubenswrapper[4774]: I1121 15:33:34.923643 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v77hd\" (UniqueName: \"kubernetes.io/projected/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-kube-api-access-v77hd\") pod \"community-operators-m78sd\" (UID: \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\") " pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:35 crc kubenswrapper[4774]: I1121 15:33:35.025479 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-catalog-content\") pod \"community-operators-m78sd\" (UID: \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\") " pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:35 crc kubenswrapper[4774]: I1121 15:33:35.025533 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-utilities\") pod \"community-operators-m78sd\" (UID: \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\") " pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:35 crc kubenswrapper[4774]: I1121 15:33:35.025553 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-v77hd\" (UniqueName: \"kubernetes.io/projected/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-kube-api-access-v77hd\") pod \"community-operators-m78sd\" (UID: \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\") " pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:35 crc kubenswrapper[4774]: I1121 15:33:35.026011 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-catalog-content\") pod \"community-operators-m78sd\" (UID: \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\") " pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:35 crc kubenswrapper[4774]: I1121 15:33:35.026437 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-utilities\") pod \"community-operators-m78sd\" (UID: \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\") " pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:35 crc kubenswrapper[4774]: I1121 15:33:35.044533 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v77hd\" (UniqueName: \"kubernetes.io/projected/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-kube-api-access-v77hd\") pod \"community-operators-m78sd\" (UID: \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\") " pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:35 crc kubenswrapper[4774]: I1121 15:33:35.111068 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:35 crc kubenswrapper[4774]: I1121 15:33:35.640683 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m78sd"] Nov 21 15:33:35 crc kubenswrapper[4774]: I1121 15:33:35.847124 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m78sd" event={"ID":"ab9b75f8-cc84-4800-9a17-81ac70b4fca5","Type":"ContainerStarted","Data":"198f068c03dbc733ca2e625f2a3cf30eda6024200866a82d8d3d61d8083ac253"} Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.353680 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-z5bzr" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.362316 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-711e-account-create-8s4fd" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.468474 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e295507-fc0d-4844-97a7-7121be4456ad-operator-scripts\") pod \"7e295507-fc0d-4844-97a7-7121be4456ad\" (UID: \"7e295507-fc0d-4844-97a7-7121be4456ad\") " Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.468550 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qv528\" (UniqueName: \"kubernetes.io/projected/7e295507-fc0d-4844-97a7-7121be4456ad-kube-api-access-qv528\") pod \"7e295507-fc0d-4844-97a7-7121be4456ad\" (UID: \"7e295507-fc0d-4844-97a7-7121be4456ad\") " Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.468664 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btm7j\" (UniqueName: \"kubernetes.io/projected/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835-kube-api-access-btm7j\") pod \"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835\" (UID: \"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835\") " Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.468709 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835-operator-scripts\") pod \"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835\" (UID: \"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835\") " Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.469517 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a73a54d7-8b04-46ea-ad8c-a4b4f6d57835" (UID: "a73a54d7-8b04-46ea-ad8c-a4b4f6d57835"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.469626 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e295507-fc0d-4844-97a7-7121be4456ad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7e295507-fc0d-4844-97a7-7121be4456ad" (UID: "7e295507-fc0d-4844-97a7-7121be4456ad"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.474988 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e295507-fc0d-4844-97a7-7121be4456ad-kube-api-access-qv528" (OuterVolumeSpecName: "kube-api-access-qv528") pod "7e295507-fc0d-4844-97a7-7121be4456ad" (UID: "7e295507-fc0d-4844-97a7-7121be4456ad"). InnerVolumeSpecName "kube-api-access-qv528". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.475341 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835-kube-api-access-btm7j" (OuterVolumeSpecName: "kube-api-access-btm7j") pod "a73a54d7-8b04-46ea-ad8c-a4b4f6d57835" (UID: "a73a54d7-8b04-46ea-ad8c-a4b4f6d57835"). InnerVolumeSpecName "kube-api-access-btm7j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.571381 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e295507-fc0d-4844-97a7-7121be4456ad-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.571419 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qv528\" (UniqueName: \"kubernetes.io/projected/7e295507-fc0d-4844-97a7-7121be4456ad-kube-api-access-qv528\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.571435 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btm7j\" (UniqueName: \"kubernetes.io/projected/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835-kube-api-access-btm7j\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.571446 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.860867 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-711e-account-create-8s4fd" event={"ID":"a73a54d7-8b04-46ea-ad8c-a4b4f6d57835","Type":"ContainerDied","Data":"cf74e31cee0cc957c8df1778432d1b93eada4aa283eabf6b643bbdd288811e41"} Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.860914 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf74e31cee0cc957c8df1778432d1b93eada4aa283eabf6b643bbdd288811e41" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.860934 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-711e-account-create-8s4fd" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.862363 4774 generic.go:334] "Generic (PLEG): container finished" podID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" containerID="21d41dc493e99a8b2e171a320cb75445cfa9e11defb7310924cccad3b691156d" exitCode=0 Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.862417 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m78sd" event={"ID":"ab9b75f8-cc84-4800-9a17-81ac70b4fca5","Type":"ContainerDied","Data":"21d41dc493e99a8b2e171a320cb75445cfa9e11defb7310924cccad3b691156d"} Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.863970 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-z5bzr" event={"ID":"7e295507-fc0d-4844-97a7-7121be4456ad","Type":"ContainerDied","Data":"b470e6d30aeec579bf60f92459babd5bae7b790a435f73443d476d15a17600b1"} Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.864005 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b470e6d30aeec579bf60f92459babd5bae7b790a435f73443d476d15a17600b1" Nov 21 15:33:36 crc kubenswrapper[4774]: I1121 15:33:36.864589 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-z5bzr" Nov 21 15:33:37 crc kubenswrapper[4774]: I1121 15:33:37.096142 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:33:37 crc kubenswrapper[4774]: E1121 15:33:37.096427 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.236746 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-wr864"] Nov 21 15:33:38 crc kubenswrapper[4774]: E1121 15:33:38.264941 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a73a54d7-8b04-46ea-ad8c-a4b4f6d57835" containerName="mariadb-account-create" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.264977 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a73a54d7-8b04-46ea-ad8c-a4b4f6d57835" containerName="mariadb-account-create" Nov 21 15:33:38 crc kubenswrapper[4774]: E1121 15:33:38.265008 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e295507-fc0d-4844-97a7-7121be4456ad" containerName="mariadb-database-create" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.265015 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e295507-fc0d-4844-97a7-7121be4456ad" containerName="mariadb-database-create" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.265354 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e295507-fc0d-4844-97a7-7121be4456ad" containerName="mariadb-database-create" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.265378 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a73a54d7-8b04-46ea-ad8c-a4b4f6d57835" containerName="mariadb-account-create" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.266407 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-wr864" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.273681 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-wr864"] Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.277695 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-gkmrc" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.279544 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.279579 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.401771 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce5745d-f81f-4de1-b663-f7469255c903-combined-ca-bundle\") pod \"neutron-db-sync-wr864\" (UID: \"6ce5745d-f81f-4de1-b663-f7469255c903\") " pod="openstack/neutron-db-sync-wr864" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.401871 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5mpl\" (UniqueName: \"kubernetes.io/projected/6ce5745d-f81f-4de1-b663-f7469255c903-kube-api-access-v5mpl\") pod \"neutron-db-sync-wr864\" (UID: \"6ce5745d-f81f-4de1-b663-f7469255c903\") " pod="openstack/neutron-db-sync-wr864" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.401937 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6ce5745d-f81f-4de1-b663-f7469255c903-config\") pod \"neutron-db-sync-wr864\" (UID: \"6ce5745d-f81f-4de1-b663-f7469255c903\") " pod="openstack/neutron-db-sync-wr864" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.502523 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce5745d-f81f-4de1-b663-f7469255c903-combined-ca-bundle\") pod \"neutron-db-sync-wr864\" (UID: \"6ce5745d-f81f-4de1-b663-f7469255c903\") " pod="openstack/neutron-db-sync-wr864" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.502616 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5mpl\" (UniqueName: \"kubernetes.io/projected/6ce5745d-f81f-4de1-b663-f7469255c903-kube-api-access-v5mpl\") pod \"neutron-db-sync-wr864\" (UID: \"6ce5745d-f81f-4de1-b663-f7469255c903\") " pod="openstack/neutron-db-sync-wr864" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.502677 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6ce5745d-f81f-4de1-b663-f7469255c903-config\") pod \"neutron-db-sync-wr864\" (UID: \"6ce5745d-f81f-4de1-b663-f7469255c903\") " pod="openstack/neutron-db-sync-wr864" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.511716 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6ce5745d-f81f-4de1-b663-f7469255c903-config\") pod \"neutron-db-sync-wr864\" (UID: \"6ce5745d-f81f-4de1-b663-f7469255c903\") " pod="openstack/neutron-db-sync-wr864" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.521033 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6ce5745d-f81f-4de1-b663-f7469255c903-combined-ca-bundle\") pod \"neutron-db-sync-wr864\" (UID: \"6ce5745d-f81f-4de1-b663-f7469255c903\") " pod="openstack/neutron-db-sync-wr864" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.525677 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5mpl\" (UniqueName: \"kubernetes.io/projected/6ce5745d-f81f-4de1-b663-f7469255c903-kube-api-access-v5mpl\") pod \"neutron-db-sync-wr864\" (UID: \"6ce5745d-f81f-4de1-b663-f7469255c903\") " pod="openstack/neutron-db-sync-wr864" Nov 21 15:33:38 crc kubenswrapper[4774]: I1121 15:33:38.593017 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-wr864" Nov 21 15:33:39 crc kubenswrapper[4774]: I1121 15:33:39.406512 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-wr864"] Nov 21 15:33:39 crc kubenswrapper[4774]: I1121 15:33:39.896450 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wr864" event={"ID":"6ce5745d-f81f-4de1-b663-f7469255c903","Type":"ContainerStarted","Data":"b0d9c21c6a61d2d13bb005329f04e1ae49a866396f6baa0a3a3fa3852c13887c"} Nov 21 15:33:40 crc kubenswrapper[4774]: I1121 15:33:40.907487 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wr864" event={"ID":"6ce5745d-f81f-4de1-b663-f7469255c903","Type":"ContainerStarted","Data":"418635577c42296bc9576b37aa34b3850aa16df360df6e0eed199fb4b4f65a4c"} Nov 21 15:33:41 crc kubenswrapper[4774]: I1121 15:33:41.915951 4774 generic.go:334] "Generic (PLEG): container finished" podID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" containerID="bd7866f936b919bf867ca00219c6209e414a22b187e581972349f6200d05d778" exitCode=0 Nov 21 15:33:41 crc kubenswrapper[4774]: I1121 15:33:41.916020 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m78sd" event={"ID":"ab9b75f8-cc84-4800-9a17-81ac70b4fca5","Type":"ContainerDied","Data":"bd7866f936b919bf867ca00219c6209e414a22b187e581972349f6200d05d778"} Nov 21 15:33:41 crc kubenswrapper[4774]: I1121 15:33:41.956271 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-wr864" podStartSLOduration=3.95624915 podStartE2EDuration="3.95624915s" podCreationTimestamp="2025-11-21 15:33:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:33:41.948771966 +0000 UTC m=+5412.600971225" watchObservedRunningTime="2025-11-21 15:33:41.95624915 +0000 UTC m=+5412.608448409" Nov 21 15:33:44 crc kubenswrapper[4774]: I1121 15:33:44.951162 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m78sd" event={"ID":"ab9b75f8-cc84-4800-9a17-81ac70b4fca5","Type":"ContainerStarted","Data":"0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a"} Nov 21 15:33:44 crc kubenswrapper[4774]: I1121 15:33:44.973924 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-m78sd" podStartSLOduration=3.598211299 podStartE2EDuration="10.973907063s" podCreationTimestamp="2025-11-21 15:33:34 +0000 UTC" firstStartedPulling="2025-11-21 15:33:36.865497232 +0000 UTC m=+5407.517696481" lastFinishedPulling="2025-11-21 15:33:44.241192986 +0000 UTC m=+5414.893392245" observedRunningTime="2025-11-21 15:33:44.971449253 +0000 UTC m=+5415.623648512" 
watchObservedRunningTime="2025-11-21 15:33:44.973907063 +0000 UTC m=+5415.626106322" Nov 21 15:33:45 crc kubenswrapper[4774]: I1121 15:33:45.112134 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:45 crc kubenswrapper[4774]: I1121 15:33:45.112184 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:46 crc kubenswrapper[4774]: I1121 15:33:46.157244 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-m78sd" podUID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" containerName="registry-server" probeResult="failure" output=< Nov 21 15:33:46 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 15:33:46 crc kubenswrapper[4774]: > Nov 21 15:33:51 crc kubenswrapper[4774]: I1121 15:33:51.093023 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:33:51 crc kubenswrapper[4774]: E1121 15:33:51.093811 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:33:55 crc kubenswrapper[4774]: I1121 15:33:55.161640 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:55 crc kubenswrapper[4774]: I1121 15:33:55.215783 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:55 crc kubenswrapper[4774]: I1121 15:33:55.408479 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m78sd"] Nov 21 15:33:57 crc kubenswrapper[4774]: I1121 15:33:57.062418 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-m78sd" podUID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" containerName="registry-server" containerID="cri-o://0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a" gracePeriod=2 Nov 21 15:33:57 crc kubenswrapper[4774]: I1121 15:33:57.526164 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:57 crc kubenswrapper[4774]: I1121 15:33:57.566048 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-utilities\") pod \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\" (UID: \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\") " Nov 21 15:33:57 crc kubenswrapper[4774]: I1121 15:33:57.566118 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v77hd\" (UniqueName: \"kubernetes.io/projected/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-kube-api-access-v77hd\") pod \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\" (UID: \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\") " Nov 21 15:33:57 crc kubenswrapper[4774]: I1121 15:33:57.566170 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-catalog-content\") pod \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\" (UID: \"ab9b75f8-cc84-4800-9a17-81ac70b4fca5\") " Nov 21 15:33:57 crc kubenswrapper[4774]: I1121 15:33:57.567517 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-utilities" (OuterVolumeSpecName: "utilities") pod "ab9b75f8-cc84-4800-9a17-81ac70b4fca5" (UID: "ab9b75f8-cc84-4800-9a17-81ac70b4fca5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:33:57 crc kubenswrapper[4774]: I1121 15:33:57.572217 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-kube-api-access-v77hd" (OuterVolumeSpecName: "kube-api-access-v77hd") pod "ab9b75f8-cc84-4800-9a17-81ac70b4fca5" (UID: "ab9b75f8-cc84-4800-9a17-81ac70b4fca5"). InnerVolumeSpecName "kube-api-access-v77hd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:33:57 crc kubenswrapper[4774]: I1121 15:33:57.617202 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab9b75f8-cc84-4800-9a17-81ac70b4fca5" (UID: "ab9b75f8-cc84-4800-9a17-81ac70b4fca5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:33:57 crc kubenswrapper[4774]: I1121 15:33:57.668566 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:57 crc kubenswrapper[4774]: I1121 15:33:57.668608 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v77hd\" (UniqueName: \"kubernetes.io/projected/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-kube-api-access-v77hd\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:57 crc kubenswrapper[4774]: I1121 15:33:57.668620 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9b75f8-cc84-4800-9a17-81ac70b4fca5-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.074579 4774 generic.go:334] "Generic (PLEG): container finished" podID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" containerID="0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a" exitCode=0 Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.074716 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m78sd" event={"ID":"ab9b75f8-cc84-4800-9a17-81ac70b4fca5","Type":"ContainerDied","Data":"0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a"} Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.074945 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m78sd" Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.075941 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m78sd" event={"ID":"ab9b75f8-cc84-4800-9a17-81ac70b4fca5","Type":"ContainerDied","Data":"198f068c03dbc733ca2e625f2a3cf30eda6024200866a82d8d3d61d8083ac253"} Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.075977 4774 scope.go:117] "RemoveContainer" containerID="0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a" Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.099156 4774 scope.go:117] "RemoveContainer" containerID="bd7866f936b919bf867ca00219c6209e414a22b187e581972349f6200d05d778" Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.121283 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-m78sd"] Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.126751 4774 scope.go:117] "RemoveContainer" containerID="21d41dc493e99a8b2e171a320cb75445cfa9e11defb7310924cccad3b691156d" Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.127739 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-m78sd"] Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.161498 4774 scope.go:117] "RemoveContainer" containerID="0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a" Nov 21 15:33:58 crc kubenswrapper[4774]: E1121 15:33:58.162033 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a\": container with ID starting with 0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a not found: ID does not exist" containerID="0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a" Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.162089 
4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a"} err="failed to get container status \"0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a\": rpc error: code = NotFound desc = could not find container \"0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a\": container with ID starting with 0d3b276962cf130e2e2a5227b606a0f5c61b4d3b93cde2f642307697bd34ba0a not found: ID does not exist" Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.162126 4774 scope.go:117] "RemoveContainer" containerID="bd7866f936b919bf867ca00219c6209e414a22b187e581972349f6200d05d778" Nov 21 15:33:58 crc kubenswrapper[4774]: E1121 15:33:58.162611 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd7866f936b919bf867ca00219c6209e414a22b187e581972349f6200d05d778\": container with ID starting with bd7866f936b919bf867ca00219c6209e414a22b187e581972349f6200d05d778 not found: ID does not exist" containerID="bd7866f936b919bf867ca00219c6209e414a22b187e581972349f6200d05d778" Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.162646 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd7866f936b919bf867ca00219c6209e414a22b187e581972349f6200d05d778"} err="failed to get container status \"bd7866f936b919bf867ca00219c6209e414a22b187e581972349f6200d05d778\": rpc error: code = NotFound desc = could not find container \"bd7866f936b919bf867ca00219c6209e414a22b187e581972349f6200d05d778\": container with ID starting with bd7866f936b919bf867ca00219c6209e414a22b187e581972349f6200d05d778 not found: ID does not exist" Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.162673 4774 scope.go:117] "RemoveContainer" containerID="21d41dc493e99a8b2e171a320cb75445cfa9e11defb7310924cccad3b691156d" Nov 21 15:33:58 crc kubenswrapper[4774]: E1121 15:33:58.162967 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21d41dc493e99a8b2e171a320cb75445cfa9e11defb7310924cccad3b691156d\": container with ID starting with 21d41dc493e99a8b2e171a320cb75445cfa9e11defb7310924cccad3b691156d not found: ID does not exist" containerID="21d41dc493e99a8b2e171a320cb75445cfa9e11defb7310924cccad3b691156d" Nov 21 15:33:58 crc kubenswrapper[4774]: I1121 15:33:58.163001 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21d41dc493e99a8b2e171a320cb75445cfa9e11defb7310924cccad3b691156d"} err="failed to get container status \"21d41dc493e99a8b2e171a320cb75445cfa9e11defb7310924cccad3b691156d\": rpc error: code = NotFound desc = could not find container \"21d41dc493e99a8b2e171a320cb75445cfa9e11defb7310924cccad3b691156d\": container with ID starting with 21d41dc493e99a8b2e171a320cb75445cfa9e11defb7310924cccad3b691156d not found: ID does not exist" Nov 21 15:34:00 crc kubenswrapper[4774]: I1121 15:34:00.104034 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" path="/var/lib/kubelet/pods/ab9b75f8-cc84-4800-9a17-81ac70b4fca5/volumes" Nov 21 15:34:02 crc kubenswrapper[4774]: I1121 15:34:02.117862 4774 generic.go:334] "Generic (PLEG): container finished" podID="6ce5745d-f81f-4de1-b663-f7469255c903" containerID="418635577c42296bc9576b37aa34b3850aa16df360df6e0eed199fb4b4f65a4c" exitCode=0 Nov 21 15:34:02 crc kubenswrapper[4774]: 
Nov 21 15:34:02 crc kubenswrapper[4774]: I1121 15:34:02.117862 4774 generic.go:334] "Generic (PLEG): container finished" podID="6ce5745d-f81f-4de1-b663-f7469255c903" containerID="418635577c42296bc9576b37aa34b3850aa16df360df6e0eed199fb4b4f65a4c" exitCode=0
Nov 21 15:34:02 crc kubenswrapper[4774]: I1121 15:34:02.117943 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wr864" event={"ID":"6ce5745d-f81f-4de1-b663-f7469255c903","Type":"ContainerDied","Data":"418635577c42296bc9576b37aa34b3850aa16df360df6e0eed199fb4b4f65a4c"}
Nov 21 15:34:03 crc kubenswrapper[4774]: I1121 15:34:03.440519 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-wr864"
Nov 21 15:34:03 crc kubenswrapper[4774]: I1121 15:34:03.464707 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce5745d-f81f-4de1-b663-f7469255c903-combined-ca-bundle\") pod \"6ce5745d-f81f-4de1-b663-f7469255c903\" (UID: \"6ce5745d-f81f-4de1-b663-f7469255c903\") "
Nov 21 15:34:03 crc kubenswrapper[4774]: I1121 15:34:03.464781 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6ce5745d-f81f-4de1-b663-f7469255c903-config\") pod \"6ce5745d-f81f-4de1-b663-f7469255c903\" (UID: \"6ce5745d-f81f-4de1-b663-f7469255c903\") "
Nov 21 15:34:03 crc kubenswrapper[4774]: I1121 15:34:03.464879 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5mpl\" (UniqueName: \"kubernetes.io/projected/6ce5745d-f81f-4de1-b663-f7469255c903-kube-api-access-v5mpl\") pod \"6ce5745d-f81f-4de1-b663-f7469255c903\" (UID: \"6ce5745d-f81f-4de1-b663-f7469255c903\") "
Nov 21 15:34:03 crc kubenswrapper[4774]: I1121 15:34:03.470036 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ce5745d-f81f-4de1-b663-f7469255c903-kube-api-access-v5mpl" (OuterVolumeSpecName: "kube-api-access-v5mpl") pod "6ce5745d-f81f-4de1-b663-f7469255c903" (UID: "6ce5745d-f81f-4de1-b663-f7469255c903"). InnerVolumeSpecName "kube-api-access-v5mpl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:34:03 crc kubenswrapper[4774]: I1121 15:34:03.490607 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ce5745d-f81f-4de1-b663-f7469255c903-config" (OuterVolumeSpecName: "config") pod "6ce5745d-f81f-4de1-b663-f7469255c903" (UID: "6ce5745d-f81f-4de1-b663-f7469255c903"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:34:03 crc kubenswrapper[4774]: I1121 15:34:03.495356 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ce5745d-f81f-4de1-b663-f7469255c903-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6ce5745d-f81f-4de1-b663-f7469255c903" (UID: "6ce5745d-f81f-4de1-b663-f7469255c903"). InnerVolumeSpecName "combined-ca-bundle".
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:34:03 crc kubenswrapper[4774]: I1121 15:34:03.565935 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ce5745d-f81f-4de1-b663-f7469255c903-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:03 crc kubenswrapper[4774]: I1121 15:34:03.565963 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/6ce5745d-f81f-4de1-b663-f7469255c903-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:03 crc kubenswrapper[4774]: I1121 15:34:03.565975 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5mpl\" (UniqueName: \"kubernetes.io/projected/6ce5745d-f81f-4de1-b663-f7469255c903-kube-api-access-v5mpl\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.093252 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.137630 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-wr864" event={"ID":"6ce5745d-f81f-4de1-b663-f7469255c903","Type":"ContainerDied","Data":"b0d9c21c6a61d2d13bb005329f04e1ae49a866396f6baa0a3a3fa3852c13887c"} Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.137689 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0d9c21c6a61d2d13bb005329f04e1ae49a866396f6baa0a3a3fa3852c13887c" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.137698 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-wr864" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.371769 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8bd7f9b4c-mvndp"] Nov 21 15:34:04 crc kubenswrapper[4774]: E1121 15:34:04.373519 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" containerName="registry-server" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.373538 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" containerName="registry-server" Nov 21 15:34:04 crc kubenswrapper[4774]: E1121 15:34:04.373552 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ce5745d-f81f-4de1-b663-f7469255c903" containerName="neutron-db-sync" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.373558 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ce5745d-f81f-4de1-b663-f7469255c903" containerName="neutron-db-sync" Nov 21 15:34:04 crc kubenswrapper[4774]: E1121 15:34:04.373568 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" containerName="extract-content" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.373575 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" containerName="extract-content" Nov 21 15:34:04 crc kubenswrapper[4774]: E1121 15:34:04.373600 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" containerName="extract-utilities" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.373608 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" containerName="extract-utilities" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 
15:34:04.373773 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ce5745d-f81f-4de1-b663-f7469255c903" containerName="neutron-db-sync" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.373783 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab9b75f8-cc84-4800-9a17-81ac70b4fca5" containerName="registry-server" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.378981 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.402571 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8bd7f9b4c-mvndp"] Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.447090 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5f6c49b5df-znqbm"] Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.449053 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.451486 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.451718 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-gkmrc" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.451923 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.464555 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5f6c49b5df-znqbm"] Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.481367 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-ovsdbserver-sb\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.481474 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-ovsdbserver-nb\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.481575 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh95k\" (UniqueName: \"kubernetes.io/projected/8ad440af-a12e-4062-bd1c-443f7f6638b2-kube-api-access-zh95k\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.481624 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-config\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.481654 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-dns-svc\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.481687 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6363746f-24af-4fef-918b-395b65ba5242-config\") pod \"neutron-5f6c49b5df-znqbm\" (UID: \"6363746f-24af-4fef-918b-395b65ba5242\") " pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.481716 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxhc4\" (UniqueName: \"kubernetes.io/projected/6363746f-24af-4fef-918b-395b65ba5242-kube-api-access-qxhc4\") pod \"neutron-5f6c49b5df-znqbm\" (UID: \"6363746f-24af-4fef-918b-395b65ba5242\") " pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.481747 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6363746f-24af-4fef-918b-395b65ba5242-httpd-config\") pod \"neutron-5f6c49b5df-znqbm\" (UID: \"6363746f-24af-4fef-918b-395b65ba5242\") " pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.481771 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6363746f-24af-4fef-918b-395b65ba5242-combined-ca-bundle\") pod \"neutron-5f6c49b5df-znqbm\" (UID: \"6363746f-24af-4fef-918b-395b65ba5242\") " pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.584418 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-config\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.584471 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-dns-svc\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.584510 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6363746f-24af-4fef-918b-395b65ba5242-config\") pod \"neutron-5f6c49b5df-znqbm\" (UID: \"6363746f-24af-4fef-918b-395b65ba5242\") " pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.584564 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxhc4\" (UniqueName: \"kubernetes.io/projected/6363746f-24af-4fef-918b-395b65ba5242-kube-api-access-qxhc4\") pod \"neutron-5f6c49b5df-znqbm\" (UID: \"6363746f-24af-4fef-918b-395b65ba5242\") " pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.584609 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6363746f-24af-4fef-918b-395b65ba5242-httpd-config\") pod 
\"neutron-5f6c49b5df-znqbm\" (UID: \"6363746f-24af-4fef-918b-395b65ba5242\") " pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.584637 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6363746f-24af-4fef-918b-395b65ba5242-combined-ca-bundle\") pod \"neutron-5f6c49b5df-znqbm\" (UID: \"6363746f-24af-4fef-918b-395b65ba5242\") " pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.584687 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-ovsdbserver-sb\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.584719 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-ovsdbserver-nb\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.584782 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh95k\" (UniqueName: \"kubernetes.io/projected/8ad440af-a12e-4062-bd1c-443f7f6638b2-kube-api-access-zh95k\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.586683 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-ovsdbserver-nb\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.587364 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-ovsdbserver-sb\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.587388 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-dns-svc\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.587432 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-config\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.593274 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6363746f-24af-4fef-918b-395b65ba5242-combined-ca-bundle\") pod \"neutron-5f6c49b5df-znqbm\" (UID: \"6363746f-24af-4fef-918b-395b65ba5242\") " pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 
15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.593343 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6363746f-24af-4fef-918b-395b65ba5242-config\") pod \"neutron-5f6c49b5df-znqbm\" (UID: \"6363746f-24af-4fef-918b-395b65ba5242\") " pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.611390 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6363746f-24af-4fef-918b-395b65ba5242-httpd-config\") pod \"neutron-5f6c49b5df-znqbm\" (UID: \"6363746f-24af-4fef-918b-395b65ba5242\") " pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.611432 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh95k\" (UniqueName: \"kubernetes.io/projected/8ad440af-a12e-4062-bd1c-443f7f6638b2-kube-api-access-zh95k\") pod \"dnsmasq-dns-8bd7f9b4c-mvndp\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.618108 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxhc4\" (UniqueName: \"kubernetes.io/projected/6363746f-24af-4fef-918b-395b65ba5242-kube-api-access-qxhc4\") pod \"neutron-5f6c49b5df-znqbm\" (UID: \"6363746f-24af-4fef-918b-395b65ba5242\") " pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.709601 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:04 crc kubenswrapper[4774]: I1121 15:34:04.783211 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:05 crc kubenswrapper[4774]: I1121 15:34:05.156581 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"4849e8220ea36f19f58def5dba0778aa648235f180867dd4feddda2e2ae19099"} Nov 21 15:34:05 crc kubenswrapper[4774]: I1121 15:34:05.277916 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8bd7f9b4c-mvndp"] Nov 21 15:34:05 crc kubenswrapper[4774]: W1121 15:34:05.287467 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ad440af_a12e_4062_bd1c_443f7f6638b2.slice/crio-fa5f146d81abd2a3e26ea0a8105881f5a01908e464d55f54dacff7a2df69103e WatchSource:0}: Error finding container fa5f146d81abd2a3e26ea0a8105881f5a01908e464d55f54dacff7a2df69103e: Status 404 returned error can't find the container with id fa5f146d81abd2a3e26ea0a8105881f5a01908e464d55f54dacff7a2df69103e Nov 21 15:34:05 crc kubenswrapper[4774]: I1121 15:34:05.504332 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5f6c49b5df-znqbm"] Nov 21 15:34:05 crc kubenswrapper[4774]: W1121 15:34:05.507748 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6363746f_24af_4fef_918b_395b65ba5242.slice/crio-cde79ac3aa42098156b0fbaed54a4fabc38a4031e1c04b2bd8fa8013af38abfc WatchSource:0}: Error finding container cde79ac3aa42098156b0fbaed54a4fabc38a4031e1c04b2bd8fa8013af38abfc: Status 404 returned error can't find the container with id cde79ac3aa42098156b0fbaed54a4fabc38a4031e1c04b2bd8fa8013af38abfc Nov 21 15:34:06 crc kubenswrapper[4774]: I1121 15:34:06.165650 4774 generic.go:334] "Generic (PLEG): container finished" podID="8ad440af-a12e-4062-bd1c-443f7f6638b2" containerID="fa8c98ca1832786b2384b116a78a7fdb4ae825d5c44cc29192c6669bbaaedffb" exitCode=0 Nov 21 15:34:06 crc kubenswrapper[4774]: I1121 15:34:06.166237 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" event={"ID":"8ad440af-a12e-4062-bd1c-443f7f6638b2","Type":"ContainerDied","Data":"fa8c98ca1832786b2384b116a78a7fdb4ae825d5c44cc29192c6669bbaaedffb"} Nov 21 15:34:06 crc kubenswrapper[4774]: I1121 15:34:06.166267 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" event={"ID":"8ad440af-a12e-4062-bd1c-443f7f6638b2","Type":"ContainerStarted","Data":"fa5f146d81abd2a3e26ea0a8105881f5a01908e464d55f54dacff7a2df69103e"} Nov 21 15:34:06 crc kubenswrapper[4774]: I1121 15:34:06.169976 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f6c49b5df-znqbm" event={"ID":"6363746f-24af-4fef-918b-395b65ba5242","Type":"ContainerStarted","Data":"5339800266e3f2ebe5db0e6f1bdaa0c1c03e5777381ef8cf0adebcf7bc93a972"} Nov 21 15:34:06 crc kubenswrapper[4774]: I1121 15:34:06.170022 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f6c49b5df-znqbm" event={"ID":"6363746f-24af-4fef-918b-395b65ba5242","Type":"ContainerStarted","Data":"760b5691c89f83285509882afd7c13b2a5188c9f1b8ada4f8f27987e648cf7dd"} Nov 21 15:34:06 crc kubenswrapper[4774]: I1121 15:34:06.170034 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f6c49b5df-znqbm" 
event={"ID":"6363746f-24af-4fef-918b-395b65ba5242","Type":"ContainerStarted","Data":"cde79ac3aa42098156b0fbaed54a4fabc38a4031e1c04b2bd8fa8013af38abfc"} Nov 21 15:34:06 crc kubenswrapper[4774]: I1121 15:34:06.170765 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:06 crc kubenswrapper[4774]: I1121 15:34:06.219209 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5f6c49b5df-znqbm" podStartSLOduration=2.21918493 podStartE2EDuration="2.21918493s" podCreationTimestamp="2025-11-21 15:34:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:34:06.21462935 +0000 UTC m=+5436.866828609" watchObservedRunningTime="2025-11-21 15:34:06.21918493 +0000 UTC m=+5436.871384189" Nov 21 15:34:07 crc kubenswrapper[4774]: I1121 15:34:07.184278 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" event={"ID":"8ad440af-a12e-4062-bd1c-443f7f6638b2","Type":"ContainerStarted","Data":"4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938"} Nov 21 15:34:07 crc kubenswrapper[4774]: I1121 15:34:07.185089 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:07 crc kubenswrapper[4774]: I1121 15:34:07.209855 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" podStartSLOduration=3.209829799 podStartE2EDuration="3.209829799s" podCreationTimestamp="2025-11-21 15:34:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:34:07.206773491 +0000 UTC m=+5437.858972740" watchObservedRunningTime="2025-11-21 15:34:07.209829799 +0000 UTC m=+5437.862029068" Nov 21 15:34:14 crc kubenswrapper[4774]: I1121 15:34:14.712190 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:34:14 crc kubenswrapper[4774]: I1121 15:34:14.789241 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578b474d4c-5c9bt"] Nov 21 15:34:14 crc kubenswrapper[4774]: I1121 15:34:14.789478 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" podUID="5fe8ce53-fc65-4c16-821e-fa1467f6bf30" containerName="dnsmasq-dns" containerID="cri-o://b0ff764da328ba11b7ef5f7d205475a9215fb64c167264937a5946b4631a2590" gracePeriod=10 Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.288302 4774 generic.go:334] "Generic (PLEG): container finished" podID="5fe8ce53-fc65-4c16-821e-fa1467f6bf30" containerID="b0ff764da328ba11b7ef5f7d205475a9215fb64c167264937a5946b4631a2590" exitCode=0 Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.288413 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" event={"ID":"5fe8ce53-fc65-4c16-821e-fa1467f6bf30","Type":"ContainerDied","Data":"b0ff764da328ba11b7ef5f7d205475a9215fb64c167264937a5946b4631a2590"} Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.288655 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" event={"ID":"5fe8ce53-fc65-4c16-821e-fa1467f6bf30","Type":"ContainerDied","Data":"92589b45bc273f56e3f89771302f156ccb2a0f28dab0fb0e8b79379552139a18"} Nov 21 15:34:15 crc 
Nov 21 15:34:14 crc kubenswrapper[4774]: I1121 15:34:14.712190 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp"
Nov 21 15:34:14 crc kubenswrapper[4774]: I1121 15:34:14.789241 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578b474d4c-5c9bt"]
Nov 21 15:34:14 crc kubenswrapper[4774]: I1121 15:34:14.789478 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" podUID="5fe8ce53-fc65-4c16-821e-fa1467f6bf30" containerName="dnsmasq-dns" containerID="cri-o://b0ff764da328ba11b7ef5f7d205475a9215fb64c167264937a5946b4631a2590" gracePeriod=10
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.288302 4774 generic.go:334] "Generic (PLEG): container finished" podID="5fe8ce53-fc65-4c16-821e-fa1467f6bf30" containerID="b0ff764da328ba11b7ef5f7d205475a9215fb64c167264937a5946b4631a2590" exitCode=0
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.288413 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" event={"ID":"5fe8ce53-fc65-4c16-821e-fa1467f6bf30","Type":"ContainerDied","Data":"b0ff764da328ba11b7ef5f7d205475a9215fb64c167264937a5946b4631a2590"}
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.288655 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" event={"ID":"5fe8ce53-fc65-4c16-821e-fa1467f6bf30","Type":"ContainerDied","Data":"92589b45bc273f56e3f89771302f156ccb2a0f28dab0fb0e8b79379552139a18"}
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.288671 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="92589b45bc273f56e3f89771302f156ccb2a0f28dab0fb0e8b79379552139a18"
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.294625 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt"
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.405216 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vttd7\" (UniqueName: \"kubernetes.io/projected/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-kube-api-access-vttd7\") pod \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") "
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.405274 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-ovsdbserver-nb\") pod \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") "
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.405298 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-ovsdbserver-sb\") pod \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") "
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.405325 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-config\") pod \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") "
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.405441 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-dns-svc\") pod \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\" (UID: \"5fe8ce53-fc65-4c16-821e-fa1467f6bf30\") "
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.411615 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-kube-api-access-vttd7" (OuterVolumeSpecName: "kube-api-access-vttd7") pod "5fe8ce53-fc65-4c16-821e-fa1467f6bf30" (UID: "5fe8ce53-fc65-4c16-821e-fa1467f6bf30"). InnerVolumeSpecName "kube-api-access-vttd7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.450830 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5fe8ce53-fc65-4c16-821e-fa1467f6bf30" (UID: "5fe8ce53-fc65-4c16-821e-fa1467f6bf30"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.452843 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-config" (OuterVolumeSpecName: "config") pod "5fe8ce53-fc65-4c16-821e-fa1467f6bf30" (UID: "5fe8ce53-fc65-4c16-821e-fa1467f6bf30"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.456656 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5fe8ce53-fc65-4c16-821e-fa1467f6bf30" (UID: "5fe8ce53-fc65-4c16-821e-fa1467f6bf30"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.462647 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5fe8ce53-fc65-4c16-821e-fa1467f6bf30" (UID: "5fe8ce53-fc65-4c16-821e-fa1467f6bf30"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.508604 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vttd7\" (UniqueName: \"kubernetes.io/projected/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-kube-api-access-vttd7\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.508651 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.508664 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.508677 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:15 crc kubenswrapper[4774]: I1121 15:34:15.508689 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5fe8ce53-fc65-4c16-821e-fa1467f6bf30-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:16 crc kubenswrapper[4774]: I1121 15:34:16.295590 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-578b474d4c-5c9bt" Nov 21 15:34:16 crc kubenswrapper[4774]: I1121 15:34:16.322992 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578b474d4c-5c9bt"] Nov 21 15:34:16 crc kubenswrapper[4774]: I1121 15:34:16.330144 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-578b474d4c-5c9bt"] Nov 21 15:34:18 crc kubenswrapper[4774]: I1121 15:34:18.108209 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe8ce53-fc65-4c16-821e-fa1467f6bf30" path="/var/lib/kubelet/pods/5fe8ce53-fc65-4c16-821e-fa1467f6bf30/volumes" Nov 21 15:34:34 crc kubenswrapper[4774]: I1121 15:34:34.791936 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5f6c49b5df-znqbm" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.695724 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-t74fs"] Nov 21 15:34:41 crc kubenswrapper[4774]: E1121 15:34:41.696604 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fe8ce53-fc65-4c16-821e-fa1467f6bf30" containerName="init" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.696618 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fe8ce53-fc65-4c16-821e-fa1467f6bf30" containerName="init" Nov 21 15:34:41 crc kubenswrapper[4774]: E1121 15:34:41.696626 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fe8ce53-fc65-4c16-821e-fa1467f6bf30" containerName="dnsmasq-dns" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.696632 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fe8ce53-fc65-4c16-821e-fa1467f6bf30" containerName="dnsmasq-dns" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.696804 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fe8ce53-fc65-4c16-821e-fa1467f6bf30" containerName="dnsmasq-dns" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.697425 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t74fs" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.705127 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-a58a-account-create-76brk"] Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.706507 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a58a-account-create-76brk" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.715944 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-t74fs"] Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.716158 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.721718 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-a58a-account-create-76brk"] Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.777593 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f1ec2bf-6208-463d-bf73-d1ec3263cdef-operator-scripts\") pod \"glance-db-create-t74fs\" (UID: \"1f1ec2bf-6208-463d-bf73-d1ec3263cdef\") " pod="openstack/glance-db-create-t74fs" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.777646 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rr6xw\" (UniqueName: \"kubernetes.io/projected/1f1ec2bf-6208-463d-bf73-d1ec3263cdef-kube-api-access-rr6xw\") pod \"glance-db-create-t74fs\" (UID: \"1f1ec2bf-6208-463d-bf73-d1ec3263cdef\") " pod="openstack/glance-db-create-t74fs" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.878687 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f1ec2bf-6208-463d-bf73-d1ec3263cdef-operator-scripts\") pod \"glance-db-create-t74fs\" (UID: \"1f1ec2bf-6208-463d-bf73-d1ec3263cdef\") " pod="openstack/glance-db-create-t74fs" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.878751 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rr6xw\" (UniqueName: \"kubernetes.io/projected/1f1ec2bf-6208-463d-bf73-d1ec3263cdef-kube-api-access-rr6xw\") pod \"glance-db-create-t74fs\" (UID: \"1f1ec2bf-6208-463d-bf73-d1ec3263cdef\") " pod="openstack/glance-db-create-t74fs" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.878836 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53dc7a4a-0286-432c-a2c0-6c9cab003290-operator-scripts\") pod \"glance-a58a-account-create-76brk\" (UID: \"53dc7a4a-0286-432c-a2c0-6c9cab003290\") " pod="openstack/glance-a58a-account-create-76brk" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.878949 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpmsv\" (UniqueName: \"kubernetes.io/projected/53dc7a4a-0286-432c-a2c0-6c9cab003290-kube-api-access-jpmsv\") pod \"glance-a58a-account-create-76brk\" (UID: \"53dc7a4a-0286-432c-a2c0-6c9cab003290\") " pod="openstack/glance-a58a-account-create-76brk" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.879752 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f1ec2bf-6208-463d-bf73-d1ec3263cdef-operator-scripts\") pod \"glance-db-create-t74fs\" (UID: \"1f1ec2bf-6208-463d-bf73-d1ec3263cdef\") " pod="openstack/glance-db-create-t74fs" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.904067 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rr6xw\" (UniqueName: 
\"kubernetes.io/projected/1f1ec2bf-6208-463d-bf73-d1ec3263cdef-kube-api-access-rr6xw\") pod \"glance-db-create-t74fs\" (UID: \"1f1ec2bf-6208-463d-bf73-d1ec3263cdef\") " pod="openstack/glance-db-create-t74fs" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.979980 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpmsv\" (UniqueName: \"kubernetes.io/projected/53dc7a4a-0286-432c-a2c0-6c9cab003290-kube-api-access-jpmsv\") pod \"glance-a58a-account-create-76brk\" (UID: \"53dc7a4a-0286-432c-a2c0-6c9cab003290\") " pod="openstack/glance-a58a-account-create-76brk" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.980092 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53dc7a4a-0286-432c-a2c0-6c9cab003290-operator-scripts\") pod \"glance-a58a-account-create-76brk\" (UID: \"53dc7a4a-0286-432c-a2c0-6c9cab003290\") " pod="openstack/glance-a58a-account-create-76brk" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.981231 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53dc7a4a-0286-432c-a2c0-6c9cab003290-operator-scripts\") pod \"glance-a58a-account-create-76brk\" (UID: \"53dc7a4a-0286-432c-a2c0-6c9cab003290\") " pod="openstack/glance-a58a-account-create-76brk" Nov 21 15:34:41 crc kubenswrapper[4774]: I1121 15:34:41.998680 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpmsv\" (UniqueName: \"kubernetes.io/projected/53dc7a4a-0286-432c-a2c0-6c9cab003290-kube-api-access-jpmsv\") pod \"glance-a58a-account-create-76brk\" (UID: \"53dc7a4a-0286-432c-a2c0-6c9cab003290\") " pod="openstack/glance-a58a-account-create-76brk" Nov 21 15:34:42 crc kubenswrapper[4774]: I1121 15:34:42.026478 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t74fs" Nov 21 15:34:42 crc kubenswrapper[4774]: I1121 15:34:42.041595 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a58a-account-create-76brk" Nov 21 15:34:42 crc kubenswrapper[4774]: I1121 15:34:42.469162 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-t74fs"] Nov 21 15:34:42 crc kubenswrapper[4774]: W1121 15:34:42.473067 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f1ec2bf_6208_463d_bf73_d1ec3263cdef.slice/crio-d4b12b3e3438ff0c0f5d0d4dabd6b2748c17603a7d78a0dd416f2d5d8a33951d WatchSource:0}: Error finding container d4b12b3e3438ff0c0f5d0d4dabd6b2748c17603a7d78a0dd416f2d5d8a33951d: Status 404 returned error can't find the container with id d4b12b3e3438ff0c0f5d0d4dabd6b2748c17603a7d78a0dd416f2d5d8a33951d Nov 21 15:34:42 crc kubenswrapper[4774]: I1121 15:34:42.532013 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-a58a-account-create-76brk"] Nov 21 15:34:42 crc kubenswrapper[4774]: I1121 15:34:42.553317 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t74fs" event={"ID":"1f1ec2bf-6208-463d-bf73-d1ec3263cdef","Type":"ContainerStarted","Data":"d4b12b3e3438ff0c0f5d0d4dabd6b2748c17603a7d78a0dd416f2d5d8a33951d"} Nov 21 15:34:42 crc kubenswrapper[4774]: I1121 15:34:42.556314 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a58a-account-create-76brk" event={"ID":"53dc7a4a-0286-432c-a2c0-6c9cab003290","Type":"ContainerStarted","Data":"44cb2ab57790d414b53b36a9624ee44edf9618eda5803dd1eeeac37c6897f8b0"} Nov 21 15:34:43 crc kubenswrapper[4774]: I1121 15:34:43.571797 4774 generic.go:334] "Generic (PLEG): container finished" podID="1f1ec2bf-6208-463d-bf73-d1ec3263cdef" containerID="1c4cb192de35ed0ac5a46dfc464a4dc19a2b5104da2bb38fb403db82e0ad7759" exitCode=0 Nov 21 15:34:43 crc kubenswrapper[4774]: I1121 15:34:43.571938 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t74fs" event={"ID":"1f1ec2bf-6208-463d-bf73-d1ec3263cdef","Type":"ContainerDied","Data":"1c4cb192de35ed0ac5a46dfc464a4dc19a2b5104da2bb38fb403db82e0ad7759"} Nov 21 15:34:43 crc kubenswrapper[4774]: I1121 15:34:43.575049 4774 generic.go:334] "Generic (PLEG): container finished" podID="53dc7a4a-0286-432c-a2c0-6c9cab003290" containerID="a5a7334c429102027fa706dde1a6770f13fc69a405d16fafc086400cf06ab22a" exitCode=0 Nov 21 15:34:43 crc kubenswrapper[4774]: I1121 15:34:43.575115 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a58a-account-create-76brk" event={"ID":"53dc7a4a-0286-432c-a2c0-6c9cab003290","Type":"ContainerDied","Data":"a5a7334c429102027fa706dde1a6770f13fc69a405d16fafc086400cf06ab22a"} Nov 21 15:34:44 crc kubenswrapper[4774]: I1121 15:34:44.956985 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-a58a-account-create-76brk" Nov 21 15:34:44 crc kubenswrapper[4774]: I1121 15:34:44.962258 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-t74fs" Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.055151 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53dc7a4a-0286-432c-a2c0-6c9cab003290-operator-scripts\") pod \"53dc7a4a-0286-432c-a2c0-6c9cab003290\" (UID: \"53dc7a4a-0286-432c-a2c0-6c9cab003290\") " Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.055715 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jpmsv\" (UniqueName: \"kubernetes.io/projected/53dc7a4a-0286-432c-a2c0-6c9cab003290-kube-api-access-jpmsv\") pod \"53dc7a4a-0286-432c-a2c0-6c9cab003290\" (UID: \"53dc7a4a-0286-432c-a2c0-6c9cab003290\") " Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.055930 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53dc7a4a-0286-432c-a2c0-6c9cab003290-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "53dc7a4a-0286-432c-a2c0-6c9cab003290" (UID: "53dc7a4a-0286-432c-a2c0-6c9cab003290"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.056684 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53dc7a4a-0286-432c-a2c0-6c9cab003290-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.062142 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53dc7a4a-0286-432c-a2c0-6c9cab003290-kube-api-access-jpmsv" (OuterVolumeSpecName: "kube-api-access-jpmsv") pod "53dc7a4a-0286-432c-a2c0-6c9cab003290" (UID: "53dc7a4a-0286-432c-a2c0-6c9cab003290"). InnerVolumeSpecName "kube-api-access-jpmsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.157922 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f1ec2bf-6208-463d-bf73-d1ec3263cdef-operator-scripts\") pod \"1f1ec2bf-6208-463d-bf73-d1ec3263cdef\" (UID: \"1f1ec2bf-6208-463d-bf73-d1ec3263cdef\") " Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.158275 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rr6xw\" (UniqueName: \"kubernetes.io/projected/1f1ec2bf-6208-463d-bf73-d1ec3263cdef-kube-api-access-rr6xw\") pod \"1f1ec2bf-6208-463d-bf73-d1ec3263cdef\" (UID: \"1f1ec2bf-6208-463d-bf73-d1ec3263cdef\") " Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.158500 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f1ec2bf-6208-463d-bf73-d1ec3263cdef-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1f1ec2bf-6208-463d-bf73-d1ec3263cdef" (UID: "1f1ec2bf-6208-463d-bf73-d1ec3263cdef"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.158801 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jpmsv\" (UniqueName: \"kubernetes.io/projected/53dc7a4a-0286-432c-a2c0-6c9cab003290-kube-api-access-jpmsv\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.158927 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f1ec2bf-6208-463d-bf73-d1ec3263cdef-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.161119 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f1ec2bf-6208-463d-bf73-d1ec3263cdef-kube-api-access-rr6xw" (OuterVolumeSpecName: "kube-api-access-rr6xw") pod "1f1ec2bf-6208-463d-bf73-d1ec3263cdef" (UID: "1f1ec2bf-6208-463d-bf73-d1ec3263cdef"). InnerVolumeSpecName "kube-api-access-rr6xw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.262400 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rr6xw\" (UniqueName: \"kubernetes.io/projected/1f1ec2bf-6208-463d-bf73-d1ec3263cdef-kube-api-access-rr6xw\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.593853 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-t74fs" event={"ID":"1f1ec2bf-6208-463d-bf73-d1ec3263cdef","Type":"ContainerDied","Data":"d4b12b3e3438ff0c0f5d0d4dabd6b2748c17603a7d78a0dd416f2d5d8a33951d"} Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.593913 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4b12b3e3438ff0c0f5d0d4dabd6b2748c17603a7d78a0dd416f2d5d8a33951d" Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.593988 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-t74fs" Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.595235 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-a58a-account-create-76brk" event={"ID":"53dc7a4a-0286-432c-a2c0-6c9cab003290","Type":"ContainerDied","Data":"44cb2ab57790d414b53b36a9624ee44edf9618eda5803dd1eeeac37c6897f8b0"} Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.595258 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44cb2ab57790d414b53b36a9624ee44edf9618eda5803dd1eeeac37c6897f8b0" Nov 21 15:34:45 crc kubenswrapper[4774]: I1121 15:34:45.595299 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-a58a-account-create-76brk" Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.845773 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-b97th"] Nov 21 15:34:46 crc kubenswrapper[4774]: E1121 15:34:46.846689 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f1ec2bf-6208-463d-bf73-d1ec3263cdef" containerName="mariadb-database-create" Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.846715 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f1ec2bf-6208-463d-bf73-d1ec3263cdef" containerName="mariadb-database-create" Nov 21 15:34:46 crc kubenswrapper[4774]: E1121 15:34:46.846730 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53dc7a4a-0286-432c-a2c0-6c9cab003290" containerName="mariadb-account-create" Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.846742 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="53dc7a4a-0286-432c-a2c0-6c9cab003290" containerName="mariadb-account-create" Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.847094 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f1ec2bf-6208-463d-bf73-d1ec3263cdef" containerName="mariadb-database-create" Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.847120 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="53dc7a4a-0286-432c-a2c0-6c9cab003290" containerName="mariadb-account-create" Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.848150 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-b97th" Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.849908 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.850364 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4l7bh" Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.854485 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-b97th"] Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.993410 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-db-sync-config-data\") pod \"glance-db-sync-b97th\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " pod="openstack/glance-db-sync-b97th" Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.993531 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-combined-ca-bundle\") pod \"glance-db-sync-b97th\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " pod="openstack/glance-db-sync-b97th" Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.993841 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78dvl\" (UniqueName: \"kubernetes.io/projected/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-kube-api-access-78dvl\") pod \"glance-db-sync-b97th\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " pod="openstack/glance-db-sync-b97th" Nov 21 15:34:46 crc kubenswrapper[4774]: I1121 15:34:46.993927 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-config-data\") pod \"glance-db-sync-b97th\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " pod="openstack/glance-db-sync-b97th" Nov 21 15:34:47 crc kubenswrapper[4774]: I1121 15:34:47.095430 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78dvl\" (UniqueName: \"kubernetes.io/projected/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-kube-api-access-78dvl\") pod \"glance-db-sync-b97th\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " pod="openstack/glance-db-sync-b97th" Nov 21 15:34:47 crc kubenswrapper[4774]: I1121 15:34:47.095499 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-config-data\") pod \"glance-db-sync-b97th\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " pod="openstack/glance-db-sync-b97th" Nov 21 15:34:47 crc kubenswrapper[4774]: I1121 15:34:47.095569 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-db-sync-config-data\") pod \"glance-db-sync-b97th\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " pod="openstack/glance-db-sync-b97th" Nov 21 15:34:47 crc kubenswrapper[4774]: I1121 15:34:47.095802 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-combined-ca-bundle\") pod \"glance-db-sync-b97th\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " pod="openstack/glance-db-sync-b97th" Nov 21 15:34:47 crc kubenswrapper[4774]: I1121 15:34:47.102769 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-db-sync-config-data\") pod \"glance-db-sync-b97th\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " pod="openstack/glance-db-sync-b97th" Nov 21 15:34:47 crc kubenswrapper[4774]: I1121 15:34:47.108912 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-combined-ca-bundle\") pod \"glance-db-sync-b97th\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " pod="openstack/glance-db-sync-b97th" Nov 21 15:34:47 crc kubenswrapper[4774]: I1121 15:34:47.110864 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-config-data\") pod \"glance-db-sync-b97th\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " pod="openstack/glance-db-sync-b97th" Nov 21 15:34:47 crc kubenswrapper[4774]: I1121 15:34:47.121253 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78dvl\" (UniqueName: \"kubernetes.io/projected/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-kube-api-access-78dvl\") pod \"glance-db-sync-b97th\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " pod="openstack/glance-db-sync-b97th" Nov 21 15:34:47 crc kubenswrapper[4774]: I1121 15:34:47.194153 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-b97th" Nov 21 15:34:47 crc kubenswrapper[4774]: I1121 15:34:47.739949 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-b97th"] Nov 21 15:34:48 crc kubenswrapper[4774]: I1121 15:34:48.627219 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-b97th" event={"ID":"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471","Type":"ContainerStarted","Data":"5d8a9b3c8014fb2061d177ab8c45d0e93aae00047dfe2a5cf08976e247c92465"} Nov 21 15:34:48 crc kubenswrapper[4774]: I1121 15:34:48.627567 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-b97th" event={"ID":"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471","Type":"ContainerStarted","Data":"83b7fe1f86cb6390aff3938828ab576d037e9b58d75734614a86c8e1c26e5500"} Nov 21 15:34:48 crc kubenswrapper[4774]: I1121 15:34:48.642582 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-b97th" podStartSLOduration=2.642558657 podStartE2EDuration="2.642558657s" podCreationTimestamp="2025-11-21 15:34:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:34:48.640751275 +0000 UTC m=+5479.292950534" watchObservedRunningTime="2025-11-21 15:34:48.642558657 +0000 UTC m=+5479.294757916" Nov 21 15:34:52 crc kubenswrapper[4774]: I1121 15:34:52.673344 4774 generic.go:334] "Generic (PLEG): container finished" podID="4bdd8a09-e5ca-4330-b5d5-cda5f2e46471" containerID="5d8a9b3c8014fb2061d177ab8c45d0e93aae00047dfe2a5cf08976e247c92465" exitCode=0 Nov 21 15:34:52 crc kubenswrapper[4774]: I1121 15:34:52.673446 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-b97th" event={"ID":"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471","Type":"ContainerDied","Data":"5d8a9b3c8014fb2061d177ab8c45d0e93aae00047dfe2a5cf08976e247c92465"} Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.062134 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-b97th" Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.161178 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78dvl\" (UniqueName: \"kubernetes.io/projected/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-kube-api-access-78dvl\") pod \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.161342 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-config-data\") pod \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.161366 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-db-sync-config-data\") pod \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.161447 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-combined-ca-bundle\") pod \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\" (UID: \"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471\") " Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.166911 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4bdd8a09-e5ca-4330-b5d5-cda5f2e46471" (UID: "4bdd8a09-e5ca-4330-b5d5-cda5f2e46471"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.168288 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-kube-api-access-78dvl" (OuterVolumeSpecName: "kube-api-access-78dvl") pod "4bdd8a09-e5ca-4330-b5d5-cda5f2e46471" (UID: "4bdd8a09-e5ca-4330-b5d5-cda5f2e46471"). InnerVolumeSpecName "kube-api-access-78dvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.191636 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4bdd8a09-e5ca-4330-b5d5-cda5f2e46471" (UID: "4bdd8a09-e5ca-4330-b5d5-cda5f2e46471"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.212394 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-config-data" (OuterVolumeSpecName: "config-data") pod "4bdd8a09-e5ca-4330-b5d5-cda5f2e46471" (UID: "4bdd8a09-e5ca-4330-b5d5-cda5f2e46471"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.263560 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.263599 4774 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.263611 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.263619 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78dvl\" (UniqueName: \"kubernetes.io/projected/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471-kube-api-access-78dvl\") on node \"crc\" DevicePath \"\"" Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.700373 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-b97th" Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.700020 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-b97th" event={"ID":"4bdd8a09-e5ca-4330-b5d5-cda5f2e46471","Type":"ContainerDied","Data":"83b7fe1f86cb6390aff3938828ab576d037e9b58d75734614a86c8e1c26e5500"} Nov 21 15:34:54 crc kubenswrapper[4774]: I1121 15:34:54.701112 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83b7fe1f86cb6390aff3938828ab576d037e9b58d75734614a86c8e1c26e5500" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.052075 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7"] Nov 21 15:34:55 crc kubenswrapper[4774]: E1121 15:34:55.052451 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bdd8a09-e5ca-4330-b5d5-cda5f2e46471" containerName="glance-db-sync" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.052467 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bdd8a09-e5ca-4330-b5d5-cda5f2e46471" containerName="glance-db-sync" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.052636 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bdd8a09-e5ca-4330-b5d5-cda5f2e46471" containerName="glance-db-sync" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.053751 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.077400 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7"] Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.090349 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.103980 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.111338 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.111552 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceph-conf-files" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.111679 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4l7bh" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.111943 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.124234 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.185507 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-dns-svc\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.185715 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkz5v\" (UniqueName: \"kubernetes.io/projected/4d788755-293b-45ec-af03-c3c7ef1ffa87-kube-api-access-kkz5v\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.185778 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.185897 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-config\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.186044 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.191198 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.193004 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.195768 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.205383 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.288191 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkz5v\" (UniqueName: \"kubernetes.io/projected/4d788755-293b-45ec-af03-c3c7ef1ffa87-kube-api-access-kkz5v\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.288246 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.288346 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-config\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.288488 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.288524 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9f461c4a-8608-4895-8ca9-423d96b7ed73-ceph\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.288592 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bg7w6\" (UniqueName: \"kubernetes.io/projected/9f461c4a-8608-4895-8ca9-423d96b7ed73-kube-api-access-bg7w6\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.288648 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f461c4a-8608-4895-8ca9-423d96b7ed73-logs\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.288662 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " 
pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.288710 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-dns-svc\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.288735 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-config-data\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.288755 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-scripts\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.288790 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f461c4a-8608-4895-8ca9-423d96b7ed73-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.289166 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.289560 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-config\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.290134 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-dns-svc\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.290395 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.308139 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkz5v\" (UniqueName: \"kubernetes.io/projected/4d788755-293b-45ec-af03-c3c7ef1ffa87-kube-api-access-kkz5v\") pod \"dnsmasq-dns-5c7dd4bf4c-v8tx7\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 
15:34:55.380028 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.390474 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9f461c4a-8608-4895-8ca9-423d96b7ed73-ceph\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.390520 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d3107d8-7b8a-415b-9afe-fa251cc56498-logs\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.390563 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bg7w6\" (UniqueName: \"kubernetes.io/projected/9f461c4a-8608-4895-8ca9-423d96b7ed73-kube-api-access-bg7w6\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.390599 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.390625 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.390644 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f461c4a-8608-4895-8ca9-423d96b7ed73-logs\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.390709 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0d3107d8-7b8a-415b-9afe-fa251cc56498-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.390770 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-config-data\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.390863 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.390902 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f461c4a-8608-4895-8ca9-423d96b7ed73-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.390974 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.391494 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f461c4a-8608-4895-8ca9-423d96b7ed73-logs\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.391552 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0d3107d8-7b8a-415b-9afe-fa251cc56498-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.391520 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f461c4a-8608-4895-8ca9-423d96b7ed73-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.391623 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.391688 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g69p8\" (UniqueName: \"kubernetes.io/projected/0d3107d8-7b8a-415b-9afe-fa251cc56498-kube-api-access-g69p8\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.396710 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-config-data\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.397486 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-scripts\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0" 
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.397609 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.397669 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9f461c4a-8608-4895-8ca9-423d96b7ed73-ceph\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.412363 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bg7w6\" (UniqueName: \"kubernetes.io/projected/9f461c4a-8608-4895-8ca9-423d96b7ed73-kube-api-access-bg7w6\") pod \"glance-default-external-api-0\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.444273 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.492950 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0d3107d8-7b8a-415b-9afe-fa251cc56498-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.492998 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.493020 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g69p8\" (UniqueName: \"kubernetes.io/projected/0d3107d8-7b8a-415b-9afe-fa251cc56498-kube-api-access-g69p8\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.493059 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d3107d8-7b8a-415b-9afe-fa251cc56498-logs\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.493122 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.493154 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0d3107d8-7b8a-415b-9afe-fa251cc56498-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.493194 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.498017 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0d3107d8-7b8a-415b-9afe-fa251cc56498-ceph\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.498495 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d3107d8-7b8a-415b-9afe-fa251cc56498-logs\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.498667 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.498853 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0d3107d8-7b8a-415b-9afe-fa251cc56498-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.501135 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.512126 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.520962 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g69p8\" (UniqueName: \"kubernetes.io/projected/0d3107d8-7b8a-415b-9afe-fa251cc56498-kube-api-access-g69p8\") pod \"glance-default-internal-api-0\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.813675 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.908611 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7"]
Nov 21 15:34:55 crc kubenswrapper[4774]: I1121 15:34:55.944660 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 21 15:34:56 crc kubenswrapper[4774]: I1121 15:34:56.064963 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 21 15:34:56 crc kubenswrapper[4774]: I1121 15:34:56.405106 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Nov 21 15:34:56 crc kubenswrapper[4774]: W1121 15:34:56.431088 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d3107d8_7b8a_415b_9afe_fa251cc56498.slice/crio-af92b97541b205ebbbe871fc867e0e9d1513709f7376f77b65eba3661ec83d47 WatchSource:0}: Error finding container af92b97541b205ebbbe871fc867e0e9d1513709f7376f77b65eba3661ec83d47: Status 404 returned error can't find the container with id af92b97541b205ebbbe871fc867e0e9d1513709f7376f77b65eba3661ec83d47
Nov 21 15:34:56 crc kubenswrapper[4774]: I1121 15:34:56.751544 4774 generic.go:334] "Generic (PLEG): container finished" podID="4d788755-293b-45ec-af03-c3c7ef1ffa87" containerID="966a5461e4d49c39bc80232d65ecf48a60b6552d861302cfce95a774c52b0454" exitCode=0
Nov 21 15:34:56 crc kubenswrapper[4774]: I1121 15:34:56.751636 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" event={"ID":"4d788755-293b-45ec-af03-c3c7ef1ffa87","Type":"ContainerDied","Data":"966a5461e4d49c39bc80232d65ecf48a60b6552d861302cfce95a774c52b0454"}
Nov 21 15:34:56 crc kubenswrapper[4774]: I1121 15:34:56.751665 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" event={"ID":"4d788755-293b-45ec-af03-c3c7ef1ffa87","Type":"ContainerStarted","Data":"e7a430a095af804458fb593b0ab29bb05b280860674f3bdcf5145005daa596aa"}
Nov 21 15:34:56 crc kubenswrapper[4774]: I1121 15:34:56.753848 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9f461c4a-8608-4895-8ca9-423d96b7ed73","Type":"ContainerStarted","Data":"7807fdf1bdebe02ee869e6ac1183f2d52d113a71bdf38f3b320f9b6e6efc818a"}
Nov 21 15:34:56 crc kubenswrapper[4774]: I1121 15:34:56.755592 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0d3107d8-7b8a-415b-9afe-fa251cc56498","Type":"ContainerStarted","Data":"af92b97541b205ebbbe871fc867e0e9d1513709f7376f77b65eba3661ec83d47"}
Nov 21 15:34:57 crc kubenswrapper[4774]: I1121 15:34:57.769022 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9f461c4a-8608-4895-8ca9-423d96b7ed73","Type":"ContainerStarted","Data":"bed7f6447981d916a7d66efd331c0c1c72a367ae3ff8a8b3244d6a615c335f2f"}
Nov 21 15:34:57 crc kubenswrapper[4774]: I1121 15:34:57.769559 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9f461c4a-8608-4895-8ca9-423d96b7ed73","Type":"ContainerStarted","Data":"620b6e523e00288a273366f89ebde8566d36d3828ac3040d031a91140b72ef1c"}
Nov 21 15:34:57 crc kubenswrapper[4774]: I1121 15:34:57.769144 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9f461c4a-8608-4895-8ca9-423d96b7ed73" containerName="glance-log" containerID="cri-o://620b6e523e00288a273366f89ebde8566d36d3828ac3040d031a91140b72ef1c" gracePeriod=30
Nov 21 15:34:57 crc kubenswrapper[4774]: I1121 15:34:57.769160 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9f461c4a-8608-4895-8ca9-423d96b7ed73" containerName="glance-httpd" containerID="cri-o://bed7f6447981d916a7d66efd331c0c1c72a367ae3ff8a8b3244d6a615c335f2f" gracePeriod=30
Nov 21 15:34:57 crc kubenswrapper[4774]: I1121 15:34:57.772384 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0d3107d8-7b8a-415b-9afe-fa251cc56498","Type":"ContainerStarted","Data":"e9de68f37e07905d80dffc3e8c2efe809137c2129a64050071dd234626b2fed4"}
Nov 21 15:34:57 crc kubenswrapper[4774]: I1121 15:34:57.774541 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" event={"ID":"4d788755-293b-45ec-af03-c3c7ef1ffa87","Type":"ContainerStarted","Data":"fc6400a52eb124a4cc62c6db857453863736ba23593e5f02c9170b77bbe5961e"}
Nov 21 15:34:57 crc kubenswrapper[4774]: I1121 15:34:57.774711 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7"
Nov 21 15:34:57 crc kubenswrapper[4774]: I1121 15:34:57.811574 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" podStartSLOduration=2.811559343 podStartE2EDuration="2.811559343s" podCreationTimestamp="2025-11-21 15:34:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:34:57.809760831 +0000 UTC m=+5488.461960090" watchObservedRunningTime="2025-11-21 15:34:57.811559343 +0000 UTC m=+5488.463758602"
Nov 21 15:34:57 crc kubenswrapper[4774]: I1121 15:34:57.812384 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=2.8123793360000002 podStartE2EDuration="2.812379336s" podCreationTimestamp="2025-11-21 15:34:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:34:57.795907386 +0000 UTC m=+5488.448106655" watchObservedRunningTime="2025-11-21 15:34:57.812379336 +0000 UTC m=+5488.464578595"
Nov 21 15:34:57 crc kubenswrapper[4774]: I1121 15:34:57.978475 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Nov 21 15:34:58 crc kubenswrapper[4774]: I1121 15:34:58.784276 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0d3107d8-7b8a-415b-9afe-fa251cc56498","Type":"ContainerStarted","Data":"4a414a0972e64e4dffb86d7551437a85e7e30cfff5b54437743e28f4e227bceb"}
Nov 21 15:34:58 crc kubenswrapper[4774]: I1121 15:34:58.784419 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0d3107d8-7b8a-415b-9afe-fa251cc56498" containerName="glance-log" containerID="cri-o://e9de68f37e07905d80dffc3e8c2efe809137c2129a64050071dd234626b2fed4" gracePeriod=30
Nov 21 15:34:58 crc kubenswrapper[4774]: I1121 15:34:58.784459 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0d3107d8-7b8a-415b-9afe-fa251cc56498" containerName="glance-httpd" containerID="cri-o://4a414a0972e64e4dffb86d7551437a85e7e30cfff5b54437743e28f4e227bceb" gracePeriod=30
Nov 21 15:34:58 crc kubenswrapper[4774]: I1121 15:34:58.788304 4774 generic.go:334] "Generic (PLEG): container finished" podID="9f461c4a-8608-4895-8ca9-423d96b7ed73" containerID="bed7f6447981d916a7d66efd331c0c1c72a367ae3ff8a8b3244d6a615c335f2f" exitCode=143
Nov 21 15:34:58 crc kubenswrapper[4774]: I1121 15:34:58.788353 4774 generic.go:334] "Generic (PLEG): container finished" podID="9f461c4a-8608-4895-8ca9-423d96b7ed73" containerID="620b6e523e00288a273366f89ebde8566d36d3828ac3040d031a91140b72ef1c" exitCode=143
Nov 21 15:34:58 crc kubenswrapper[4774]: I1121 15:34:58.788913 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9f461c4a-8608-4895-8ca9-423d96b7ed73","Type":"ContainerDied","Data":"bed7f6447981d916a7d66efd331c0c1c72a367ae3ff8a8b3244d6a615c335f2f"}
Nov 21 15:34:58 crc kubenswrapper[4774]: I1121 15:34:58.789270 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9f461c4a-8608-4895-8ca9-423d96b7ed73","Type":"ContainerDied","Data":"620b6e523e00288a273366f89ebde8566d36d3828ac3040d031a91140b72ef1c"}
Nov 21 15:34:58 crc kubenswrapper[4774]: I1121 15:34:58.810603 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.810587291 podStartE2EDuration="3.810587291s" podCreationTimestamp="2025-11-21 15:34:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:34:58.810471228 +0000 UTC m=+5489.462670487" watchObservedRunningTime="2025-11-21 15:34:58.810587291 +0000 UTC m=+5489.462786550"
Nov 21 15:34:58 crc kubenswrapper[4774]: I1121 15:34:58.897934 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.060554 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-config-data\") pod \"9f461c4a-8608-4895-8ca9-423d96b7ed73\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") "
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.060610 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bg7w6\" (UniqueName: \"kubernetes.io/projected/9f461c4a-8608-4895-8ca9-423d96b7ed73-kube-api-access-bg7w6\") pod \"9f461c4a-8608-4895-8ca9-423d96b7ed73\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") "
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.060658 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f461c4a-8608-4895-8ca9-423d96b7ed73-logs\") pod \"9f461c4a-8608-4895-8ca9-423d96b7ed73\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") "
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.060709 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9f461c4a-8608-4895-8ca9-423d96b7ed73-ceph\") pod \"9f461c4a-8608-4895-8ca9-423d96b7ed73\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") "
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.060744 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-combined-ca-bundle\") pod \"9f461c4a-8608-4895-8ca9-423d96b7ed73\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") "
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.060788 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-scripts\") pod \"9f461c4a-8608-4895-8ca9-423d96b7ed73\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") "
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.060832 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f461c4a-8608-4895-8ca9-423d96b7ed73-httpd-run\") pod \"9f461c4a-8608-4895-8ca9-423d96b7ed73\" (UID: \"9f461c4a-8608-4895-8ca9-423d96b7ed73\") "
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.061167 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f461c4a-8608-4895-8ca9-423d96b7ed73-logs" (OuterVolumeSpecName: "logs") pod "9f461c4a-8608-4895-8ca9-423d96b7ed73" (UID: "9f461c4a-8608-4895-8ca9-423d96b7ed73"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.061347 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f461c4a-8608-4895-8ca9-423d96b7ed73-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9f461c4a-8608-4895-8ca9-423d96b7ed73" (UID: "9f461c4a-8608-4895-8ca9-423d96b7ed73"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.061526 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f461c4a-8608-4895-8ca9-423d96b7ed73-httpd-run\") on node \"crc\" DevicePath \"\""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.061545 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f461c4a-8608-4895-8ca9-423d96b7ed73-logs\") on node \"crc\" DevicePath \"\""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.067220 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-scripts" (OuterVolumeSpecName: "scripts") pod "9f461c4a-8608-4895-8ca9-423d96b7ed73" (UID: "9f461c4a-8608-4895-8ca9-423d96b7ed73"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.067577 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f461c4a-8608-4895-8ca9-423d96b7ed73-kube-api-access-bg7w6" (OuterVolumeSpecName: "kube-api-access-bg7w6") pod "9f461c4a-8608-4895-8ca9-423d96b7ed73" (UID: "9f461c4a-8608-4895-8ca9-423d96b7ed73"). InnerVolumeSpecName "kube-api-access-bg7w6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.068944 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f461c4a-8608-4895-8ca9-423d96b7ed73-ceph" (OuterVolumeSpecName: "ceph") pod "9f461c4a-8608-4895-8ca9-423d96b7ed73" (UID: "9f461c4a-8608-4895-8ca9-423d96b7ed73"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.092728 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f461c4a-8608-4895-8ca9-423d96b7ed73" (UID: "9f461c4a-8608-4895-8ca9-423d96b7ed73"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.106122 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-config-data" (OuterVolumeSpecName: "config-data") pod "9f461c4a-8608-4895-8ca9-423d96b7ed73" (UID: "9f461c4a-8608-4895-8ca9-423d96b7ed73"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.163675 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.163751 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bg7w6\" (UniqueName: \"kubernetes.io/projected/9f461c4a-8608-4895-8ca9-423d96b7ed73-kube-api-access-bg7w6\") on node \"crc\" DevicePath \"\""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.163766 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/9f461c4a-8608-4895-8ca9-423d96b7ed73-ceph\") on node \"crc\" DevicePath \"\""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.163781 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.163810 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f461c4a-8608-4895-8ca9-423d96b7ed73-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.809024 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9f461c4a-8608-4895-8ca9-423d96b7ed73","Type":"ContainerDied","Data":"7807fdf1bdebe02ee869e6ac1183f2d52d113a71bdf38f3b320f9b6e6efc818a"}
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.809393 4774 scope.go:117] "RemoveContainer" containerID="bed7f6447981d916a7d66efd331c0c1c72a367ae3ff8a8b3244d6a615c335f2f"
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.809073 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.812150 4774 generic.go:334] "Generic (PLEG): container finished" podID="0d3107d8-7b8a-415b-9afe-fa251cc56498" containerID="4a414a0972e64e4dffb86d7551437a85e7e30cfff5b54437743e28f4e227bceb" exitCode=0
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.812173 4774 generic.go:334] "Generic (PLEG): container finished" podID="0d3107d8-7b8a-415b-9afe-fa251cc56498" containerID="e9de68f37e07905d80dffc3e8c2efe809137c2129a64050071dd234626b2fed4" exitCode=143
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.812213 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0d3107d8-7b8a-415b-9afe-fa251cc56498","Type":"ContainerDied","Data":"4a414a0972e64e4dffb86d7551437a85e7e30cfff5b54437743e28f4e227bceb"}
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.812261 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0d3107d8-7b8a-415b-9afe-fa251cc56498","Type":"ContainerDied","Data":"e9de68f37e07905d80dffc3e8c2efe809137c2129a64050071dd234626b2fed4"}
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.842005 4774 scope.go:117] "RemoveContainer" containerID="620b6e523e00288a273366f89ebde8566d36d3828ac3040d031a91140b72ef1c"
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.861393 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.875725 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.888321 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 21 15:34:59 crc kubenswrapper[4774]: E1121 15:34:59.888838 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f461c4a-8608-4895-8ca9-423d96b7ed73" containerName="glance-log"
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.888854 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f461c4a-8608-4895-8ca9-423d96b7ed73" containerName="glance-log"
Nov 21 15:34:59 crc kubenswrapper[4774]: E1121 15:34:59.888900 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f461c4a-8608-4895-8ca9-423d96b7ed73" containerName="glance-httpd"
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.888909 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f461c4a-8608-4895-8ca9-423d96b7ed73" containerName="glance-httpd"
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.889140 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f461c4a-8608-4895-8ca9-423d96b7ed73" containerName="glance-httpd"
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.889163 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f461c4a-8608-4895-8ca9-423d96b7ed73" containerName="glance-log"
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.890352 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.892792 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Nov 21 15:34:59 crc kubenswrapper[4774]: I1121 15:34:59.896278 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.007992 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.080790 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/01940ba5-c2da-40b4-aa60-e07998ef2bb0-ceph\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.081284 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-scripts\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.081366 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-config-data\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.081472 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01940ba5-c2da-40b4-aa60-e07998ef2bb0-logs\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.081581 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/01940ba5-c2da-40b4-aa60-e07998ef2bb0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.081657 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dc6s\" (UniqueName: \"kubernetes.io/projected/01940ba5-c2da-40b4-aa60-e07998ef2bb0-kube-api-access-6dc6s\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.081736 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.102635 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f461c4a-8608-4895-8ca9-423d96b7ed73" path="/var/lib/kubelet/pods/9f461c4a-8608-4895-8ca9-423d96b7ed73/volumes"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.182800 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-combined-ca-bundle\") pod \"0d3107d8-7b8a-415b-9afe-fa251cc56498\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") "
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.182909 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d3107d8-7b8a-415b-9afe-fa251cc56498-logs\") pod \"0d3107d8-7b8a-415b-9afe-fa251cc56498\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") "
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.183159 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0d3107d8-7b8a-415b-9afe-fa251cc56498-ceph\") pod \"0d3107d8-7b8a-415b-9afe-fa251cc56498\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") "
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.183238 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-scripts\") pod \"0d3107d8-7b8a-415b-9afe-fa251cc56498\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") "
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.183366 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g69p8\" (UniqueName: \"kubernetes.io/projected/0d3107d8-7b8a-415b-9afe-fa251cc56498-kube-api-access-g69p8\") pod \"0d3107d8-7b8a-415b-9afe-fa251cc56498\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") "
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.183420 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0d3107d8-7b8a-415b-9afe-fa251cc56498-httpd-run\") pod \"0d3107d8-7b8a-415b-9afe-fa251cc56498\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") "
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.183448 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-config-data\") pod \"0d3107d8-7b8a-415b-9afe-fa251cc56498\" (UID: \"0d3107d8-7b8a-415b-9afe-fa251cc56498\") "
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.183589 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d3107d8-7b8a-415b-9afe-fa251cc56498-logs" (OuterVolumeSpecName: "logs") pod "0d3107d8-7b8a-415b-9afe-fa251cc56498" (UID: "0d3107d8-7b8a-415b-9afe-fa251cc56498"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.183874 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01940ba5-c2da-40b4-aa60-e07998ef2bb0-logs\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.183895 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d3107d8-7b8a-415b-9afe-fa251cc56498-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0d3107d8-7b8a-415b-9afe-fa251cc56498" (UID: "0d3107d8-7b8a-415b-9afe-fa251cc56498"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.184074 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/01940ba5-c2da-40b4-aa60-e07998ef2bb0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.184142 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dc6s\" (UniqueName: \"kubernetes.io/projected/01940ba5-c2da-40b4-aa60-e07998ef2bb0-kube-api-access-6dc6s\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.184205 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.184453 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01940ba5-c2da-40b4-aa60-e07998ef2bb0-logs\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.184483 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/01940ba5-c2da-40b4-aa60-e07998ef2bb0-ceph\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.184552 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-scripts\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.184576 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-config-data\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.184676 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0d3107d8-7b8a-415b-9afe-fa251cc56498-httpd-run\") on node \"crc\" DevicePath \"\""
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.184699 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d3107d8-7b8a-415b-9afe-fa251cc56498-logs\") on node \"crc\" DevicePath \"\""
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.184718 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/01940ba5-c2da-40b4-aa60-e07998ef2bb0-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.190013 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-scripts" (OuterVolumeSpecName: "scripts") pod "0d3107d8-7b8a-415b-9afe-fa251cc56498" (UID: "0d3107d8-7b8a-415b-9afe-fa251cc56498"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.190485 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d3107d8-7b8a-415b-9afe-fa251cc56498-kube-api-access-g69p8" (OuterVolumeSpecName: "kube-api-access-g69p8") pod "0d3107d8-7b8a-415b-9afe-fa251cc56498" (UID: "0d3107d8-7b8a-415b-9afe-fa251cc56498"). InnerVolumeSpecName "kube-api-access-g69p8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.191965 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-config-data\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.198400 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d3107d8-7b8a-415b-9afe-fa251cc56498-ceph" (OuterVolumeSpecName: "ceph") pod "0d3107d8-7b8a-415b-9afe-fa251cc56498" (UID: "0d3107d8-7b8a-415b-9afe-fa251cc56498"). InnerVolumeSpecName "ceph".
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.198617 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.199073 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-scripts\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.200414 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/01940ba5-c2da-40b4-aa60-e07998ef2bb0-ceph\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.205241 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dc6s\" (UniqueName: \"kubernetes.io/projected/01940ba5-c2da-40b4-aa60-e07998ef2bb0-kube-api-access-6dc6s\") pod \"glance-default-external-api-0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") " pod="openstack/glance-default-external-api-0" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.216217 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d3107d8-7b8a-415b-9afe-fa251cc56498" (UID: "0d3107d8-7b8a-415b-9afe-fa251cc56498"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.235807 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-config-data" (OuterVolumeSpecName: "config-data") pod "0d3107d8-7b8a-415b-9afe-fa251cc56498" (UID: "0d3107d8-7b8a-415b-9afe-fa251cc56498"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.287245 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.287359 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.287393 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/0d3107d8-7b8a-415b-9afe-fa251cc56498-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.287415 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0d3107d8-7b8a-415b-9afe-fa251cc56498-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.287439 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g69p8\" (UniqueName: \"kubernetes.io/projected/0d3107d8-7b8a-415b-9afe-fa251cc56498-kube-api-access-g69p8\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.301403 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.822905 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.822890 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0d3107d8-7b8a-415b-9afe-fa251cc56498","Type":"ContainerDied","Data":"af92b97541b205ebbbe871fc867e0e9d1513709f7376f77b65eba3661ec83d47"} Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.823409 4774 scope.go:117] "RemoveContainer" containerID="4a414a0972e64e4dffb86d7551437a85e7e30cfff5b54437743e28f4e227bceb" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.854785 4774 scope.go:117] "RemoveContainer" containerID="e9de68f37e07905d80dffc3e8c2efe809137c2129a64050071dd234626b2fed4" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.868017 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.881422 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.894204 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.905071 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 15:35:00 crc kubenswrapper[4774]: E1121 15:35:00.905776 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d3107d8-7b8a-415b-9afe-fa251cc56498" containerName="glance-httpd" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.905793 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d3107d8-7b8a-415b-9afe-fa251cc56498" containerName="glance-httpd" Nov 21 15:35:00 crc kubenswrapper[4774]: E1121 15:35:00.905867 4774 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="0d3107d8-7b8a-415b-9afe-fa251cc56498" containerName="glance-log" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.905876 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d3107d8-7b8a-415b-9afe-fa251cc56498" containerName="glance-log" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.906128 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d3107d8-7b8a-415b-9afe-fa251cc56498" containerName="glance-log" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.906144 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d3107d8-7b8a-415b-9afe-fa251cc56498" containerName="glance-httpd" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.907473 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.909451 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Nov 21 15:35:00 crc kubenswrapper[4774]: I1121 15:35:00.914500 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.102460 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.102733 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.102802 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.102885 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6315b9aa-80dc-44e1-860b-64228934f3c4-logs\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.103027 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6315b9aa-80dc-44e1-860b-64228934f3c4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.103088 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6315b9aa-80dc-44e1-860b-64228934f3c4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.103117 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb4tm\" (UniqueName: \"kubernetes.io/projected/6315b9aa-80dc-44e1-860b-64228934f3c4-kube-api-access-cb4tm\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.205012 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6315b9aa-80dc-44e1-860b-64228934f3c4-logs\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.205477 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6315b9aa-80dc-44e1-860b-64228934f3c4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.205508 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6315b9aa-80dc-44e1-860b-64228934f3c4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.205532 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6315b9aa-80dc-44e1-860b-64228934f3c4-logs\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.205536 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb4tm\" (UniqueName: \"kubernetes.io/projected/6315b9aa-80dc-44e1-860b-64228934f3c4-kube-api-access-cb4tm\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.205655 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.205689 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.205708 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " 
pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.207053 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6315b9aa-80dc-44e1-860b-64228934f3c4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.212268 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6315b9aa-80dc-44e1-860b-64228934f3c4-ceph\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.212385 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.214239 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.214348 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.222301 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb4tm\" (UniqueName: \"kubernetes.io/projected/6315b9aa-80dc-44e1-860b-64228934f3c4-kube-api-access-cb4tm\") pod \"glance-default-internal-api-0\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.239348 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.816031 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.833413 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6315b9aa-80dc-44e1-860b-64228934f3c4","Type":"ContainerStarted","Data":"fcae138905ccf1c634d2695f1251f185f1e5587560d985a0b8ec4afdc912660e"} Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.835970 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"01940ba5-c2da-40b4-aa60-e07998ef2bb0","Type":"ContainerStarted","Data":"36a7ad8b954f5c270872626ea4bfbc98f113fd14ce768d0617ab3476f0de6ba9"} Nov 21 15:35:01 crc kubenswrapper[4774]: I1121 15:35:01.836354 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"01940ba5-c2da-40b4-aa60-e07998ef2bb0","Type":"ContainerStarted","Data":"88f234ae082d0900aaed5fc58d34efa0b68b9ac86b46b8ab6941c53793b334fa"} Nov 21 15:35:02 crc kubenswrapper[4774]: I1121 15:35:02.116695 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d3107d8-7b8a-415b-9afe-fa251cc56498" path="/var/lib/kubelet/pods/0d3107d8-7b8a-415b-9afe-fa251cc56498/volumes" Nov 21 15:35:02 crc kubenswrapper[4774]: I1121 15:35:02.848434 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6315b9aa-80dc-44e1-860b-64228934f3c4","Type":"ContainerStarted","Data":"5e5877512b194f9170acb775272cc9142553c683e10c1b67ce473e218d02c0e9"} Nov 21 15:35:02 crc kubenswrapper[4774]: I1121 15:35:02.850393 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"01940ba5-c2da-40b4-aa60-e07998ef2bb0","Type":"ContainerStarted","Data":"c22eba4bc0a1620eb9dcf19688edc011d7205ff5b7093ca303575e24789b6e22"} Nov 21 15:35:02 crc kubenswrapper[4774]: I1121 15:35:02.873144 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.8731272 podStartE2EDuration="3.8731272s" podCreationTimestamp="2025-11-21 15:34:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:35:02.871863144 +0000 UTC m=+5493.524062413" watchObservedRunningTime="2025-11-21 15:35:02.8731272 +0000 UTC m=+5493.525326459" Nov 21 15:35:03 crc kubenswrapper[4774]: I1121 15:35:03.860761 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6315b9aa-80dc-44e1-860b-64228934f3c4","Type":"ContainerStarted","Data":"66bc77eba788a555a7827510ba663b7087faa84356832825035b64638db03638"} Nov 21 15:35:03 crc kubenswrapper[4774]: I1121 15:35:03.881219 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.881201436 podStartE2EDuration="3.881201436s" podCreationTimestamp="2025-11-21 15:35:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:35:03.880392733 +0000 UTC m=+5494.532591992" watchObservedRunningTime="2025-11-21 15:35:03.881201436 +0000 UTC m=+5494.533400695" Nov 21 15:35:06 crc 
kubenswrapper[4774]: I1121 15:35:06.001012 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.079323 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8bd7f9b4c-mvndp"] Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.079613 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" podUID="8ad440af-a12e-4062-bd1c-443f7f6638b2" containerName="dnsmasq-dns" containerID="cri-o://4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938" gracePeriod=10 Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.569546 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.641289 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-ovsdbserver-sb\") pod \"8ad440af-a12e-4062-bd1c-443f7f6638b2\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.641356 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zh95k\" (UniqueName: \"kubernetes.io/projected/8ad440af-a12e-4062-bd1c-443f7f6638b2-kube-api-access-zh95k\") pod \"8ad440af-a12e-4062-bd1c-443f7f6638b2\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.641399 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-config\") pod \"8ad440af-a12e-4062-bd1c-443f7f6638b2\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.641441 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-ovsdbserver-nb\") pod \"8ad440af-a12e-4062-bd1c-443f7f6638b2\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.641464 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-dns-svc\") pod \"8ad440af-a12e-4062-bd1c-443f7f6638b2\" (UID: \"8ad440af-a12e-4062-bd1c-443f7f6638b2\") " Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.660374 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ad440af-a12e-4062-bd1c-443f7f6638b2-kube-api-access-zh95k" (OuterVolumeSpecName: "kube-api-access-zh95k") pod "8ad440af-a12e-4062-bd1c-443f7f6638b2" (UID: "8ad440af-a12e-4062-bd1c-443f7f6638b2"). InnerVolumeSpecName "kube-api-access-zh95k". 
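[annotation] The pod_startup_latency_tracker record above reports podStartE2EDuration="3.8731272s" for glance-default-external-api-0, which is just the gap between the podCreationTimestamp (15:34:59) and the observedRunningTime (15:35:02.8731272) it prints. A quick arithmetic check of that figure (sketch only; errors elided for brevity):

// startup.go — recompute the reported podStartE2EDuration from the two timestamps.
package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matches the "+0000 UTC" form the kubelet prints.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2025-11-21 15:34:59 +0000 UTC")
	running, _ := time.Parse(layout, "2025-11-21 15:35:02.8731272 +0000 UTC")
	fmt.Println(running.Sub(created)) // prints 3.8731272s, matching the log
}
[end annotation]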
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.744525 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zh95k\" (UniqueName: \"kubernetes.io/projected/8ad440af-a12e-4062-bd1c-443f7f6638b2-kube-api-access-zh95k\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.755799 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-config" (OuterVolumeSpecName: "config") pod "8ad440af-a12e-4062-bd1c-443f7f6638b2" (UID: "8ad440af-a12e-4062-bd1c-443f7f6638b2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.780515 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8ad440af-a12e-4062-bd1c-443f7f6638b2" (UID: "8ad440af-a12e-4062-bd1c-443f7f6638b2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.797994 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8ad440af-a12e-4062-bd1c-443f7f6638b2" (UID: "8ad440af-a12e-4062-bd1c-443f7f6638b2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.801466 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8ad440af-a12e-4062-bd1c-443f7f6638b2" (UID: "8ad440af-a12e-4062-bd1c-443f7f6638b2"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.846149 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.846191 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.846206 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:06 crc kubenswrapper[4774]: I1121 15:35:06.846220 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ad440af-a12e-4062-bd1c-443f7f6638b2-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:07 crc kubenswrapper[4774]: I1121 15:35:07.034610 4774 generic.go:334] "Generic (PLEG): container finished" podID="8ad440af-a12e-4062-bd1c-443f7f6638b2" containerID="4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938" exitCode=0 Nov 21 15:35:07 crc kubenswrapper[4774]: I1121 15:35:07.034936 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" event={"ID":"8ad440af-a12e-4062-bd1c-443f7f6638b2","Type":"ContainerDied","Data":"4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938"} Nov 21 15:35:07 crc kubenswrapper[4774]: I1121 15:35:07.034971 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" event={"ID":"8ad440af-a12e-4062-bd1c-443f7f6638b2","Type":"ContainerDied","Data":"fa5f146d81abd2a3e26ea0a8105881f5a01908e464d55f54dacff7a2df69103e"} Nov 21 15:35:07 crc kubenswrapper[4774]: I1121 15:35:07.034994 4774 scope.go:117] "RemoveContainer" containerID="4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938" Nov 21 15:35:07 crc kubenswrapper[4774]: I1121 15:35:07.035159 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8bd7f9b4c-mvndp" Nov 21 15:35:07 crc kubenswrapper[4774]: I1121 15:35:07.061094 4774 scope.go:117] "RemoveContainer" containerID="fa8c98ca1832786b2384b116a78a7fdb4ae825d5c44cc29192c6669bbaaedffb" Nov 21 15:35:07 crc kubenswrapper[4774]: I1121 15:35:07.076472 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8bd7f9b4c-mvndp"] Nov 21 15:35:07 crc kubenswrapper[4774]: I1121 15:35:07.084153 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8bd7f9b4c-mvndp"] Nov 21 15:35:07 crc kubenswrapper[4774]: I1121 15:35:07.097894 4774 scope.go:117] "RemoveContainer" containerID="4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938" Nov 21 15:35:07 crc kubenswrapper[4774]: E1121 15:35:07.098286 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938\": container with ID starting with 4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938 not found: ID does not exist" containerID="4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938" Nov 21 15:35:07 crc kubenswrapper[4774]: I1121 15:35:07.098338 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938"} err="failed to get container status \"4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938\": rpc error: code = NotFound desc = could not find container \"4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938\": container with ID starting with 4d6e72dfb5efb395c17ccbb8dbfdd0a3321116fac4913ae538d666284d2ba938 not found: ID does not exist" Nov 21 15:35:07 crc kubenswrapper[4774]: I1121 15:35:07.098375 4774 scope.go:117] "RemoveContainer" containerID="fa8c98ca1832786b2384b116a78a7fdb4ae825d5c44cc29192c6669bbaaedffb" Nov 21 15:35:07 crc kubenswrapper[4774]: E1121 15:35:07.098648 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa8c98ca1832786b2384b116a78a7fdb4ae825d5c44cc29192c6669bbaaedffb\": container with ID starting with fa8c98ca1832786b2384b116a78a7fdb4ae825d5c44cc29192c6669bbaaedffb not found: ID does not exist" containerID="fa8c98ca1832786b2384b116a78a7fdb4ae825d5c44cc29192c6669bbaaedffb" Nov 21 15:35:07 crc kubenswrapper[4774]: I1121 15:35:07.098670 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa8c98ca1832786b2384b116a78a7fdb4ae825d5c44cc29192c6669bbaaedffb"} err="failed to get container status \"fa8c98ca1832786b2384b116a78a7fdb4ae825d5c44cc29192c6669bbaaedffb\": rpc error: code = NotFound desc = could not find container \"fa8c98ca1832786b2384b116a78a7fdb4ae825d5c44cc29192c6669bbaaedffb\": container with ID starting with fa8c98ca1832786b2384b116a78a7fdb4ae825d5c44cc29192c6669bbaaedffb not found: ID does not exist" Nov 21 15:35:08 crc kubenswrapper[4774]: I1121 15:35:08.115270 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ad440af-a12e-4062-bd1c-443f7f6638b2" path="/var/lib/kubelet/pods/8ad440af-a12e-4062-bd1c-443f7f6638b2/volumes" Nov 21 15:35:10 crc kubenswrapper[4774]: I1121 15:35:10.302974 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 21 15:35:10 crc kubenswrapper[4774]: I1121 15:35:10.303498 4774 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 21 15:35:10 crc kubenswrapper[4774]: I1121 15:35:10.354985 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 21 15:35:10 crc kubenswrapper[4774]: I1121 15:35:10.371512 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 21 15:35:11 crc kubenswrapper[4774]: I1121 15:35:11.072145 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 21 15:35:11 crc kubenswrapper[4774]: I1121 15:35:11.072742 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 21 15:35:11 crc kubenswrapper[4774]: I1121 15:35:11.240595 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 21 15:35:11 crc kubenswrapper[4774]: I1121 15:35:11.240648 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 21 15:35:11 crc kubenswrapper[4774]: I1121 15:35:11.316539 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 21 15:35:11 crc kubenswrapper[4774]: I1121 15:35:11.324236 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 21 15:35:12 crc kubenswrapper[4774]: I1121 15:35:12.081813 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 21 15:35:12 crc kubenswrapper[4774]: I1121 15:35:12.082142 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 21 15:35:13 crc kubenswrapper[4774]: I1121 15:35:13.088680 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 21 15:35:13 crc kubenswrapper[4774]: I1121 15:35:13.088715 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 21 15:35:13 crc kubenswrapper[4774]: I1121 15:35:13.126407 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 21 15:35:13 crc kubenswrapper[4774]: I1121 15:35:13.149035 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 21 15:35:14 crc kubenswrapper[4774]: I1121 15:35:14.206615 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 21 15:35:14 crc kubenswrapper[4774]: I1121 15:35:14.207235 4774 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 21 15:35:14 crc kubenswrapper[4774]: I1121 15:35:14.247874 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.205746 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-pkbg9"] Nov 21 15:35:20 crc kubenswrapper[4774]: E1121 15:35:20.206795 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ad440af-a12e-4062-bd1c-443f7f6638b2" containerName="dnsmasq-dns" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.206811 4774 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="8ad440af-a12e-4062-bd1c-443f7f6638b2" containerName="dnsmasq-dns" Nov 21 15:35:20 crc kubenswrapper[4774]: E1121 15:35:20.206854 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ad440af-a12e-4062-bd1c-443f7f6638b2" containerName="init" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.206862 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ad440af-a12e-4062-bd1c-443f7f6638b2" containerName="init" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.207071 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ad440af-a12e-4062-bd1c-443f7f6638b2" containerName="dnsmasq-dns" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.207738 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pkbg9" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.228971 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-pkbg9"] Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.302490 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-228f-account-create-47lmt"] Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.303670 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-228f-account-create-47lmt" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.313049 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-228f-account-create-47lmt"] Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.313577 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.388741 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a56a3c85-e130-4514-b602-a94b444454ad-operator-scripts\") pod \"placement-db-create-pkbg9\" (UID: \"a56a3c85-e130-4514-b602-a94b444454ad\") " pod="openstack/placement-db-create-pkbg9" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.388798 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgb2m\" (UniqueName: \"kubernetes.io/projected/a56a3c85-e130-4514-b602-a94b444454ad-kube-api-access-qgb2m\") pod \"placement-db-create-pkbg9\" (UID: \"a56a3c85-e130-4514-b602-a94b444454ad\") " pod="openstack/placement-db-create-pkbg9" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.490838 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a56a3c85-e130-4514-b602-a94b444454ad-operator-scripts\") pod \"placement-db-create-pkbg9\" (UID: \"a56a3c85-e130-4514-b602-a94b444454ad\") " pod="openstack/placement-db-create-pkbg9" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.490894 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgb2m\" (UniqueName: \"kubernetes.io/projected/a56a3c85-e130-4514-b602-a94b444454ad-kube-api-access-qgb2m\") pod \"placement-db-create-pkbg9\" (UID: \"a56a3c85-e130-4514-b602-a94b444454ad\") " pod="openstack/placement-db-create-pkbg9" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.490945 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvbm8\" (UniqueName: 
\"kubernetes.io/projected/66350a84-c1ad-4e88-a80e-63d338e03016-kube-api-access-rvbm8\") pod \"placement-228f-account-create-47lmt\" (UID: \"66350a84-c1ad-4e88-a80e-63d338e03016\") " pod="openstack/placement-228f-account-create-47lmt" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.490992 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66350a84-c1ad-4e88-a80e-63d338e03016-operator-scripts\") pod \"placement-228f-account-create-47lmt\" (UID: \"66350a84-c1ad-4e88-a80e-63d338e03016\") " pod="openstack/placement-228f-account-create-47lmt" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.491667 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a56a3c85-e130-4514-b602-a94b444454ad-operator-scripts\") pod \"placement-db-create-pkbg9\" (UID: \"a56a3c85-e130-4514-b602-a94b444454ad\") " pod="openstack/placement-db-create-pkbg9" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.512163 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgb2m\" (UniqueName: \"kubernetes.io/projected/a56a3c85-e130-4514-b602-a94b444454ad-kube-api-access-qgb2m\") pod \"placement-db-create-pkbg9\" (UID: \"a56a3c85-e130-4514-b602-a94b444454ad\") " pod="openstack/placement-db-create-pkbg9" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.534470 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pkbg9" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.592356 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66350a84-c1ad-4e88-a80e-63d338e03016-operator-scripts\") pod \"placement-228f-account-create-47lmt\" (UID: \"66350a84-c1ad-4e88-a80e-63d338e03016\") " pod="openstack/placement-228f-account-create-47lmt" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.592868 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvbm8\" (UniqueName: \"kubernetes.io/projected/66350a84-c1ad-4e88-a80e-63d338e03016-kube-api-access-rvbm8\") pod \"placement-228f-account-create-47lmt\" (UID: \"66350a84-c1ad-4e88-a80e-63d338e03016\") " pod="openstack/placement-228f-account-create-47lmt" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.593164 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66350a84-c1ad-4e88-a80e-63d338e03016-operator-scripts\") pod \"placement-228f-account-create-47lmt\" (UID: \"66350a84-c1ad-4e88-a80e-63d338e03016\") " pod="openstack/placement-228f-account-create-47lmt" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.612063 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvbm8\" (UniqueName: \"kubernetes.io/projected/66350a84-c1ad-4e88-a80e-63d338e03016-kube-api-access-rvbm8\") pod \"placement-228f-account-create-47lmt\" (UID: \"66350a84-c1ad-4e88-a80e-63d338e03016\") " pod="openstack/placement-228f-account-create-47lmt" Nov 21 15:35:20 crc kubenswrapper[4774]: I1121 15:35:20.618015 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-228f-account-create-47lmt" Nov 21 15:35:21 crc kubenswrapper[4774]: I1121 15:35:21.008339 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-pkbg9"] Nov 21 15:35:21 crc kubenswrapper[4774]: I1121 15:35:21.130057 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-228f-account-create-47lmt"] Nov 21 15:35:21 crc kubenswrapper[4774]: I1121 15:35:21.162863 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pkbg9" event={"ID":"a56a3c85-e130-4514-b602-a94b444454ad","Type":"ContainerStarted","Data":"98ba25e738569b38adcec63872ec654674b0653f99c2d321915f219c30e903cb"} Nov 21 15:35:21 crc kubenswrapper[4774]: I1121 15:35:21.167947 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-228f-account-create-47lmt" event={"ID":"66350a84-c1ad-4e88-a80e-63d338e03016","Type":"ContainerStarted","Data":"27c50b7dee014a98322ddf78be4070f0ee9d442bd80858a058da2b077e8bac09"} Nov 21 15:35:22 crc kubenswrapper[4774]: I1121 15:35:22.177293 4774 generic.go:334] "Generic (PLEG): container finished" podID="a56a3c85-e130-4514-b602-a94b444454ad" containerID="2768c28dfbbd275bdd9cfe8a0a5deb4b8b1e8febb1eb57d9a9586cf3b89803f4" exitCode=0 Nov 21 15:35:22 crc kubenswrapper[4774]: I1121 15:35:22.177380 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pkbg9" event={"ID":"a56a3c85-e130-4514-b602-a94b444454ad","Type":"ContainerDied","Data":"2768c28dfbbd275bdd9cfe8a0a5deb4b8b1e8febb1eb57d9a9586cf3b89803f4"} Nov 21 15:35:22 crc kubenswrapper[4774]: I1121 15:35:22.180330 4774 generic.go:334] "Generic (PLEG): container finished" podID="66350a84-c1ad-4e88-a80e-63d338e03016" containerID="448ba4ab0aca6edd03851feb3559a2a615dfc1a2c92696d1b84c17941b929c7a" exitCode=0 Nov 21 15:35:22 crc kubenswrapper[4774]: I1121 15:35:22.180385 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-228f-account-create-47lmt" event={"ID":"66350a84-c1ad-4e88-a80e-63d338e03016","Type":"ContainerDied","Data":"448ba4ab0aca6edd03851feb3559a2a615dfc1a2c92696d1b84c17941b929c7a"} Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.601765 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pkbg9" Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.606119 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-228f-account-create-47lmt" Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.746865 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66350a84-c1ad-4e88-a80e-63d338e03016-operator-scripts\") pod \"66350a84-c1ad-4e88-a80e-63d338e03016\" (UID: \"66350a84-c1ad-4e88-a80e-63d338e03016\") " Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.746940 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a56a3c85-e130-4514-b602-a94b444454ad-operator-scripts\") pod \"a56a3c85-e130-4514-b602-a94b444454ad\" (UID: \"a56a3c85-e130-4514-b602-a94b444454ad\") " Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.747044 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgb2m\" (UniqueName: \"kubernetes.io/projected/a56a3c85-e130-4514-b602-a94b444454ad-kube-api-access-qgb2m\") pod \"a56a3c85-e130-4514-b602-a94b444454ad\" (UID: \"a56a3c85-e130-4514-b602-a94b444454ad\") " Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.747156 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvbm8\" (UniqueName: \"kubernetes.io/projected/66350a84-c1ad-4e88-a80e-63d338e03016-kube-api-access-rvbm8\") pod \"66350a84-c1ad-4e88-a80e-63d338e03016\" (UID: \"66350a84-c1ad-4e88-a80e-63d338e03016\") " Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.748155 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a56a3c85-e130-4514-b602-a94b444454ad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a56a3c85-e130-4514-b602-a94b444454ad" (UID: "a56a3c85-e130-4514-b602-a94b444454ad"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.748154 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66350a84-c1ad-4e88-a80e-63d338e03016-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "66350a84-c1ad-4e88-a80e-63d338e03016" (UID: "66350a84-c1ad-4e88-a80e-63d338e03016"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.752744 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66350a84-c1ad-4e88-a80e-63d338e03016-kube-api-access-rvbm8" (OuterVolumeSpecName: "kube-api-access-rvbm8") pod "66350a84-c1ad-4e88-a80e-63d338e03016" (UID: "66350a84-c1ad-4e88-a80e-63d338e03016"). InnerVolumeSpecName "kube-api-access-rvbm8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.753060 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a56a3c85-e130-4514-b602-a94b444454ad-kube-api-access-qgb2m" (OuterVolumeSpecName: "kube-api-access-qgb2m") pod "a56a3c85-e130-4514-b602-a94b444454ad" (UID: "a56a3c85-e130-4514-b602-a94b444454ad"). InnerVolumeSpecName "kube-api-access-qgb2m". 
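[annotation] The "SyncLoop (probe)" records earlier in this section show both glance pods moving startup: unhealthy -> started, then readiness: "" -> ready. Those transitions can be pulled out the same way as the PLEG events; a sketch (stdin input, file name mine, not kubelet code):

// probes.go — list probe status transitions from "SyncLoop (probe)" records.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// status may be empty (status=""), hence \w* rather than \w+.
var probe = regexp.MustCompile(`probe="(\w+)" status="(\w*)" pod="([^"]+)"`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 1<<20), 1<<20)
	for sc.Scan() {
		if m := probe.FindStringSubmatch(sc.Text()); m != nil {
			fmt.Printf("%-50s %-10s %q\n", m[3], m[1], m[2])
		}
	}
}
[end annotation]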
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.849625 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66350a84-c1ad-4e88-a80e-63d338e03016-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.849656 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a56a3c85-e130-4514-b602-a94b444454ad-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.849666 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgb2m\" (UniqueName: \"kubernetes.io/projected/a56a3c85-e130-4514-b602-a94b444454ad-kube-api-access-qgb2m\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:23 crc kubenswrapper[4774]: I1121 15:35:23.849676 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvbm8\" (UniqueName: \"kubernetes.io/projected/66350a84-c1ad-4e88-a80e-63d338e03016-kube-api-access-rvbm8\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:24 crc kubenswrapper[4774]: I1121 15:35:24.207936 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pkbg9" event={"ID":"a56a3c85-e130-4514-b602-a94b444454ad","Type":"ContainerDied","Data":"98ba25e738569b38adcec63872ec654674b0653f99c2d321915f219c30e903cb"} Nov 21 15:35:24 crc kubenswrapper[4774]: I1121 15:35:24.207975 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98ba25e738569b38adcec63872ec654674b0653f99c2d321915f219c30e903cb" Nov 21 15:35:24 crc kubenswrapper[4774]: I1121 15:35:24.207978 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pkbg9" Nov 21 15:35:24 crc kubenswrapper[4774]: I1121 15:35:24.209245 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-228f-account-create-47lmt" event={"ID":"66350a84-c1ad-4e88-a80e-63d338e03016","Type":"ContainerDied","Data":"27c50b7dee014a98322ddf78be4070f0ee9d442bd80858a058da2b077e8bac09"} Nov 21 15:35:24 crc kubenswrapper[4774]: I1121 15:35:24.209267 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27c50b7dee014a98322ddf78be4070f0ee9d442bd80858a058da2b077e8bac09" Nov 21 15:35:24 crc kubenswrapper[4774]: I1121 15:35:24.209277 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-228f-account-create-47lmt" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.562346 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-568f54959f-kwmrs"] Nov 21 15:35:25 crc kubenswrapper[4774]: E1121 15:35:25.562747 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66350a84-c1ad-4e88-a80e-63d338e03016" containerName="mariadb-account-create" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.562763 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="66350a84-c1ad-4e88-a80e-63d338e03016" containerName="mariadb-account-create" Nov 21 15:35:25 crc kubenswrapper[4774]: E1121 15:35:25.562807 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a56a3c85-e130-4514-b602-a94b444454ad" containerName="mariadb-database-create" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.562920 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a56a3c85-e130-4514-b602-a94b444454ad" containerName="mariadb-database-create" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.563112 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="66350a84-c1ad-4e88-a80e-63d338e03016" containerName="mariadb-account-create" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.563127 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a56a3c85-e130-4514-b602-a94b444454ad" containerName="mariadb-database-create" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.564309 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.577403 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-568f54959f-kwmrs"] Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.578624 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-ovsdbserver-nb\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.578719 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-ovsdbserver-sb\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.578768 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-dns-svc\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.578810 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-config\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.578843 4774 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9vfq\" (UniqueName: \"kubernetes.io/projected/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-kube-api-access-r9vfq\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.599810 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-lc24s"] Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.601095 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.603400 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.604204 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-p7f57" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.604671 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.617288 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-lc24s"] Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.680689 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-combined-ca-bundle\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.680754 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-scripts\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.680908 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-dns-svc\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.680978 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75gxl\" (UniqueName: \"kubernetes.io/projected/6c58e79c-d93c-43d1-bc52-d7ba1de82482-kube-api-access-75gxl\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.681088 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-config\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.681124 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9vfq\" (UniqueName: \"kubernetes.io/projected/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-kube-api-access-r9vfq\") pod 
\"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.681166 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c58e79c-d93c-43d1-bc52-d7ba1de82482-logs\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.681195 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-config-data\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.681279 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-ovsdbserver-nb\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.681503 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-ovsdbserver-sb\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.682066 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-dns-svc\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.682235 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-ovsdbserver-nb\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.682384 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-ovsdbserver-sb\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.683261 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-config\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.697675 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9vfq\" (UniqueName: \"kubernetes.io/projected/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-kube-api-access-r9vfq\") pod \"dnsmasq-dns-568f54959f-kwmrs\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " 
pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.782854 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-combined-ca-bundle\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.782924 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-scripts\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.782960 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75gxl\" (UniqueName: \"kubernetes.io/projected/6c58e79c-d93c-43d1-bc52-d7ba1de82482-kube-api-access-75gxl\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.783554 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c58e79c-d93c-43d1-bc52-d7ba1de82482-logs\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.783577 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-config-data\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.784108 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c58e79c-d93c-43d1-bc52-d7ba1de82482-logs\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.787797 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-scripts\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.794838 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-combined-ca-bundle\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.795305 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-config-data\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.808988 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75gxl\" (UniqueName: 
\"kubernetes.io/projected/6c58e79c-d93c-43d1-bc52-d7ba1de82482-kube-api-access-75gxl\") pod \"placement-db-sync-lc24s\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.884967 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:25 crc kubenswrapper[4774]: I1121 15:35:25.922703 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:26 crc kubenswrapper[4774]: I1121 15:35:26.448615 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-568f54959f-kwmrs"] Nov 21 15:35:26 crc kubenswrapper[4774]: W1121 15:35:26.457105 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda74c1583_05f2_4bb9_b1bc_51f1d0e304f3.slice/crio-66e9c01c298d6e3b574e51c3d4c396364c9902cbd336c89786c880525e85a2b8 WatchSource:0}: Error finding container 66e9c01c298d6e3b574e51c3d4c396364c9902cbd336c89786c880525e85a2b8: Status 404 returned error can't find the container with id 66e9c01c298d6e3b574e51c3d4c396364c9902cbd336c89786c880525e85a2b8 Nov 21 15:35:26 crc kubenswrapper[4774]: I1121 15:35:26.542989 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-lc24s"] Nov 21 15:35:26 crc kubenswrapper[4774]: W1121 15:35:26.557358 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice/crio-643da8e3d51164a44d4343fcb31d799099c9ffabafb77e23ac408b984700b704 WatchSource:0}: Error finding container 643da8e3d51164a44d4343fcb31d799099c9ffabafb77e23ac408b984700b704: Status 404 returned error can't find the container with id 643da8e3d51164a44d4343fcb31d799099c9ffabafb77e23ac408b984700b704 Nov 21 15:35:27 crc kubenswrapper[4774]: I1121 15:35:27.246601 4774 generic.go:334] "Generic (PLEG): container finished" podID="a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" containerID="fe46e7075cbfdc054f99fb90b456dd662e1863cf7ffcdbf0beb819d6d645cb8b" exitCode=0 Nov 21 15:35:27 crc kubenswrapper[4774]: I1121 15:35:27.247143 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" event={"ID":"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3","Type":"ContainerDied","Data":"fe46e7075cbfdc054f99fb90b456dd662e1863cf7ffcdbf0beb819d6d645cb8b"} Nov 21 15:35:27 crc kubenswrapper[4774]: I1121 15:35:27.247192 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" event={"ID":"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3","Type":"ContainerStarted","Data":"66e9c01c298d6e3b574e51c3d4c396364c9902cbd336c89786c880525e85a2b8"} Nov 21 15:35:27 crc kubenswrapper[4774]: I1121 15:35:27.258744 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-lc24s" event={"ID":"6c58e79c-d93c-43d1-bc52-d7ba1de82482","Type":"ContainerStarted","Data":"159f6a9ed9061fbdc774c20ad9884c95c9c7a1089f483b2cf76a6fbb9eb2937c"} Nov 21 15:35:27 crc kubenswrapper[4774]: I1121 15:35:27.258836 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-lc24s" event={"ID":"6c58e79c-d93c-43d1-bc52-d7ba1de82482","Type":"ContainerStarted","Data":"643da8e3d51164a44d4343fcb31d799099c9ffabafb77e23ac408b984700b704"} Nov 21 15:35:27 crc kubenswrapper[4774]: I1121 15:35:27.317734 4774 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-lc24s" podStartSLOduration=2.317712955 podStartE2EDuration="2.317712955s" podCreationTimestamp="2025-11-21 15:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:35:27.309539302 +0000 UTC m=+5517.961738601" watchObservedRunningTime="2025-11-21 15:35:27.317712955 +0000 UTC m=+5517.969912214" Nov 21 15:35:28 crc kubenswrapper[4774]: E1121 15:35:28.010412 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice/crio-159f6a9ed9061fbdc774c20ad9884c95c9c7a1089f483b2cf76a6fbb9eb2937c.scope\": RecentStats: unable to find data in memory cache]" Nov 21 15:35:28 crc kubenswrapper[4774]: I1121 15:35:28.269721 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" event={"ID":"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3","Type":"ContainerStarted","Data":"c2ae4e5c91610d5cd557be03b5e25f0e7f52ca8b744ccc6983964f1d334318f4"} Nov 21 15:35:28 crc kubenswrapper[4774]: I1121 15:35:28.270666 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:28 crc kubenswrapper[4774]: I1121 15:35:28.271471 4774 generic.go:334] "Generic (PLEG): container finished" podID="6c58e79c-d93c-43d1-bc52-d7ba1de82482" containerID="159f6a9ed9061fbdc774c20ad9884c95c9c7a1089f483b2cf76a6fbb9eb2937c" exitCode=0 Nov 21 15:35:28 crc kubenswrapper[4774]: I1121 15:35:28.271503 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-lc24s" event={"ID":"6c58e79c-d93c-43d1-bc52-d7ba1de82482","Type":"ContainerDied","Data":"159f6a9ed9061fbdc774c20ad9884c95c9c7a1089f483b2cf76a6fbb9eb2937c"} Nov 21 15:35:28 crc kubenswrapper[4774]: I1121 15:35:28.289913 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" podStartSLOduration=3.289891228 podStartE2EDuration="3.289891228s" podCreationTimestamp="2025-11-21 15:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:35:28.285868644 +0000 UTC m=+5518.938067903" watchObservedRunningTime="2025-11-21 15:35:28.289891228 +0000 UTC m=+5518.942090487" Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.643261 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.752015 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-combined-ca-bundle\") pod \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.752436 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-scripts\") pod \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.752490 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-config-data\") pod \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.752545 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75gxl\" (UniqueName: \"kubernetes.io/projected/6c58e79c-d93c-43d1-bc52-d7ba1de82482-kube-api-access-75gxl\") pod \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.752574 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c58e79c-d93c-43d1-bc52-d7ba1de82482-logs\") pod \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\" (UID: \"6c58e79c-d93c-43d1-bc52-d7ba1de82482\") " Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.753015 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c58e79c-d93c-43d1-bc52-d7ba1de82482-logs" (OuterVolumeSpecName: "logs") pod "6c58e79c-d93c-43d1-bc52-d7ba1de82482" (UID: "6c58e79c-d93c-43d1-bc52-d7ba1de82482"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.757967 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-scripts" (OuterVolumeSpecName: "scripts") pod "6c58e79c-d93c-43d1-bc52-d7ba1de82482" (UID: "6c58e79c-d93c-43d1-bc52-d7ba1de82482"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.760062 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c58e79c-d93c-43d1-bc52-d7ba1de82482-kube-api-access-75gxl" (OuterVolumeSpecName: "kube-api-access-75gxl") pod "6c58e79c-d93c-43d1-bc52-d7ba1de82482" (UID: "6c58e79c-d93c-43d1-bc52-d7ba1de82482"). InnerVolumeSpecName "kube-api-access-75gxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.774446 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c58e79c-d93c-43d1-bc52-d7ba1de82482" (UID: "6c58e79c-d93c-43d1-bc52-d7ba1de82482"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.780193 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-config-data" (OuterVolumeSpecName: "config-data") pod "6c58e79c-d93c-43d1-bc52-d7ba1de82482" (UID: "6c58e79c-d93c-43d1-bc52-d7ba1de82482"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.854546 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.854588 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.854600 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c58e79c-d93c-43d1-bc52-d7ba1de82482-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.854613 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75gxl\" (UniqueName: \"kubernetes.io/projected/6c58e79c-d93c-43d1-bc52-d7ba1de82482-kube-api-access-75gxl\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:29 crc kubenswrapper[4774]: I1121 15:35:29.854628 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c58e79c-d93c-43d1-bc52-d7ba1de82482-logs\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.291989 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-lc24s" event={"ID":"6c58e79c-d93c-43d1-bc52-d7ba1de82482","Type":"ContainerDied","Data":"643da8e3d51164a44d4343fcb31d799099c9ffabafb77e23ac408b984700b704"} Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.292048 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="643da8e3d51164a44d4343fcb31d799099c9ffabafb77e23ac408b984700b704" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.292054 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-lc24s" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.749024 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-64f867456d-c2t4b"] Nov 21 15:35:30 crc kubenswrapper[4774]: E1121 15:35:30.750091 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c58e79c-d93c-43d1-bc52-d7ba1de82482" containerName="placement-db-sync" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.750113 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c58e79c-d93c-43d1-bc52-d7ba1de82482" containerName="placement-db-sync" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.750698 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c58e79c-d93c-43d1-bc52-d7ba1de82482" containerName="placement-db-sync" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.764221 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.765701 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-64f867456d-c2t4b"] Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.767677 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.768321 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.768434 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-p7f57" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.871836 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1f65da0-5cc3-4448-96b0-61e27f2506cb-combined-ca-bundle\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.871912 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25x79\" (UniqueName: \"kubernetes.io/projected/c1f65da0-5cc3-4448-96b0-61e27f2506cb-kube-api-access-25x79\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.871963 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1f65da0-5cc3-4448-96b0-61e27f2506cb-scripts\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.872000 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1f65da0-5cc3-4448-96b0-61e27f2506cb-config-data\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.872057 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1f65da0-5cc3-4448-96b0-61e27f2506cb-logs\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.973147 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1f65da0-5cc3-4448-96b0-61e27f2506cb-config-data\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.973479 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1f65da0-5cc3-4448-96b0-61e27f2506cb-logs\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.973551 4774 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1f65da0-5cc3-4448-96b0-61e27f2506cb-combined-ca-bundle\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.973590 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25x79\" (UniqueName: \"kubernetes.io/projected/c1f65da0-5cc3-4448-96b0-61e27f2506cb-kube-api-access-25x79\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.973649 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1f65da0-5cc3-4448-96b0-61e27f2506cb-scripts\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.974006 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1f65da0-5cc3-4448-96b0-61e27f2506cb-logs\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.978423 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1f65da0-5cc3-4448-96b0-61e27f2506cb-scripts\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.978658 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1f65da0-5cc3-4448-96b0-61e27f2506cb-config-data\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.980020 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1f65da0-5cc3-4448-96b0-61e27f2506cb-combined-ca-bundle\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:30 crc kubenswrapper[4774]: I1121 15:35:30.995671 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25x79\" (UniqueName: \"kubernetes.io/projected/c1f65da0-5cc3-4448-96b0-61e27f2506cb-kube-api-access-25x79\") pod \"placement-64f867456d-c2t4b\" (UID: \"c1f65da0-5cc3-4448-96b0-61e27f2506cb\") " pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:31 crc kubenswrapper[4774]: I1121 15:35:31.084163 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:31 crc kubenswrapper[4774]: I1121 15:35:31.540100 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-64f867456d-c2t4b"] Nov 21 15:35:31 crc kubenswrapper[4774]: W1121 15:35:31.544224 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1f65da0_5cc3_4448_96b0_61e27f2506cb.slice/crio-d9f8e3941fe7bfcdb6aa9101e7ee8c28b7b5088c15493d86ca64ffe091a24b86 WatchSource:0}: Error finding container d9f8e3941fe7bfcdb6aa9101e7ee8c28b7b5088c15493d86ca64ffe091a24b86: Status 404 returned error can't find the container with id d9f8e3941fe7bfcdb6aa9101e7ee8c28b7b5088c15493d86ca64ffe091a24b86 Nov 21 15:35:32 crc kubenswrapper[4774]: I1121 15:35:32.309456 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-64f867456d-c2t4b" event={"ID":"c1f65da0-5cc3-4448-96b0-61e27f2506cb","Type":"ContainerStarted","Data":"8e12266523c1abc66bf70e8ea2ee34b91c9cbf49f32712f90b30f4d083e5cf8b"} Nov 21 15:35:32 crc kubenswrapper[4774]: I1121 15:35:32.311058 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:32 crc kubenswrapper[4774]: I1121 15:35:32.311166 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-64f867456d-c2t4b" event={"ID":"c1f65da0-5cc3-4448-96b0-61e27f2506cb","Type":"ContainerStarted","Data":"e8313837e01626ae40c2af4f81bae1a7e21c0ab195cd6c8e25ab7b2843a4101b"} Nov 21 15:35:32 crc kubenswrapper[4774]: I1121 15:35:32.311264 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-64f867456d-c2t4b" event={"ID":"c1f65da0-5cc3-4448-96b0-61e27f2506cb","Type":"ContainerStarted","Data":"d9f8e3941fe7bfcdb6aa9101e7ee8c28b7b5088c15493d86ca64ffe091a24b86"} Nov 21 15:35:32 crc kubenswrapper[4774]: I1121 15:35:32.327674 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-64f867456d-c2t4b" podStartSLOduration=2.32765315 podStartE2EDuration="2.32765315s" podCreationTimestamp="2025-11-21 15:35:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:35:32.325071557 +0000 UTC m=+5522.977270816" watchObservedRunningTime="2025-11-21 15:35:32.32765315 +0000 UTC m=+5522.979852409" Nov 21 15:35:33 crc kubenswrapper[4774]: I1121 15:35:33.323350 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:35:35 crc kubenswrapper[4774]: I1121 15:35:35.887150 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:35:35 crc kubenswrapper[4774]: I1121 15:35:35.942774 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7"] Nov 21 15:35:35 crc kubenswrapper[4774]: I1121 15:35:35.943021 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" podUID="4d788755-293b-45ec-af03-c3c7ef1ffa87" containerName="dnsmasq-dns" containerID="cri-o://fc6400a52eb124a4cc62c6db857453863736ba23593e5f02c9170b77bbe5961e" gracePeriod=10 Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.354114 4774 generic.go:334] "Generic (PLEG): container finished" podID="4d788755-293b-45ec-af03-c3c7ef1ffa87" 
containerID="fc6400a52eb124a4cc62c6db857453863736ba23593e5f02c9170b77bbe5961e" exitCode=0 Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.354354 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" event={"ID":"4d788755-293b-45ec-af03-c3c7ef1ffa87","Type":"ContainerDied","Data":"fc6400a52eb124a4cc62c6db857453863736ba23593e5f02c9170b77bbe5961e"} Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.354467 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" event={"ID":"4d788755-293b-45ec-af03-c3c7ef1ffa87","Type":"ContainerDied","Data":"e7a430a095af804458fb593b0ab29bb05b280860674f3bdcf5145005daa596aa"} Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.354487 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7a430a095af804458fb593b0ab29bb05b280860674f3bdcf5145005daa596aa" Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.411030 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.483489 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-config\") pod \"4d788755-293b-45ec-af03-c3c7ef1ffa87\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.483554 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-ovsdbserver-nb\") pod \"4d788755-293b-45ec-af03-c3c7ef1ffa87\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.483592 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-ovsdbserver-sb\") pod \"4d788755-293b-45ec-af03-c3c7ef1ffa87\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.483631 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkz5v\" (UniqueName: \"kubernetes.io/projected/4d788755-293b-45ec-af03-c3c7ef1ffa87-kube-api-access-kkz5v\") pod \"4d788755-293b-45ec-af03-c3c7ef1ffa87\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.483782 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-dns-svc\") pod \"4d788755-293b-45ec-af03-c3c7ef1ffa87\" (UID: \"4d788755-293b-45ec-af03-c3c7ef1ffa87\") " Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.490798 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d788755-293b-45ec-af03-c3c7ef1ffa87-kube-api-access-kkz5v" (OuterVolumeSpecName: "kube-api-access-kkz5v") pod "4d788755-293b-45ec-af03-c3c7ef1ffa87" (UID: "4d788755-293b-45ec-af03-c3c7ef1ffa87"). InnerVolumeSpecName "kube-api-access-kkz5v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.527199 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4d788755-293b-45ec-af03-c3c7ef1ffa87" (UID: "4d788755-293b-45ec-af03-c3c7ef1ffa87"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.534358 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4d788755-293b-45ec-af03-c3c7ef1ffa87" (UID: "4d788755-293b-45ec-af03-c3c7ef1ffa87"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.535240 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-config" (OuterVolumeSpecName: "config") pod "4d788755-293b-45ec-af03-c3c7ef1ffa87" (UID: "4d788755-293b-45ec-af03-c3c7ef1ffa87"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.535514 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4d788755-293b-45ec-af03-c3c7ef1ffa87" (UID: "4d788755-293b-45ec-af03-c3c7ef1ffa87"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.586564 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.586621 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.586634 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.586647 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4d788755-293b-45ec-af03-c3c7ef1ffa87-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:36 crc kubenswrapper[4774]: I1121 15:35:36.586662 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkz5v\" (UniqueName: \"kubernetes.io/projected/4d788755-293b-45ec-af03-c3c7ef1ffa87-kube-api-access-kkz5v\") on node \"crc\" DevicePath \"\"" Nov 21 15:35:37 crc kubenswrapper[4774]: I1121 15:35:37.362045 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7" Nov 21 15:35:37 crc kubenswrapper[4774]: I1121 15:35:37.399556 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7"] Nov 21 15:35:37 crc kubenswrapper[4774]: I1121 15:35:37.407328 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c7dd4bf4c-v8tx7"] Nov 21 15:35:38 crc kubenswrapper[4774]: I1121 15:35:38.104526 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d788755-293b-45ec-af03-c3c7ef1ffa87" path="/var/lib/kubelet/pods/4d788755-293b-45ec-af03-c3c7ef1ffa87/volumes" Nov 21 15:35:38 crc kubenswrapper[4774]: E1121 15:35:38.229226 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice/crio-643da8e3d51164a44d4343fcb31d799099c9ffabafb77e23ac408b984700b704\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice\": RecentStats: unable to find data in memory cache]" Nov 21 15:35:40 crc kubenswrapper[4774]: I1121 15:35:40.655945 4774 scope.go:117] "RemoveContainer" containerID="a5cefbace06a8ddd5ebcf5ad785961f1fa7333d7f52d45983228ee60c195b53e" Nov 21 15:35:48 crc kubenswrapper[4774]: E1121 15:35:48.445986 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice/crio-643da8e3d51164a44d4343fcb31d799099c9ffabafb77e23ac408b984700b704\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice\": RecentStats: unable to find data in memory cache]" Nov 21 15:35:58 crc kubenswrapper[4774]: E1121 15:35:58.663400 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice/crio-643da8e3d51164a44d4343fcb31d799099c9ffabafb77e23ac408b984700b704\": RecentStats: unable to find data in memory cache]" Nov 21 15:36:02 crc kubenswrapper[4774]: I1121 15:36:02.066942 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:36:02 crc kubenswrapper[4774]: I1121 15:36:02.107679 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-64f867456d-c2t4b" Nov 21 15:36:08 crc kubenswrapper[4774]: E1121 15:36:08.899354 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice/crio-643da8e3d51164a44d4343fcb31d799099c9ffabafb77e23ac408b984700b704\": RecentStats: unable to find data in memory cache]" Nov 21 15:36:19 crc kubenswrapper[4774]: E1121 15:36:19.149230 4774 
cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice/crio-643da8e3d51164a44d4343fcb31d799099c9ffabafb77e23ac408b984700b704\": RecentStats: unable to find data in memory cache]" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.121914 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-s5c2v"] Nov 21 15:36:22 crc kubenswrapper[4774]: E1121 15:36:22.122704 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d788755-293b-45ec-af03-c3c7ef1ffa87" containerName="init" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.122723 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d788755-293b-45ec-af03-c3c7ef1ffa87" containerName="init" Nov 21 15:36:22 crc kubenswrapper[4774]: E1121 15:36:22.122734 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d788755-293b-45ec-af03-c3c7ef1ffa87" containerName="dnsmasq-dns" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.122742 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d788755-293b-45ec-af03-c3c7ef1ffa87" containerName="dnsmasq-dns" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.123023 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d788755-293b-45ec-af03-c3c7ef1ffa87" containerName="dnsmasq-dns" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.123847 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-s5c2v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.139114 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-s5c2v"] Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.208632 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-lpd59"] Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.209727 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-lpd59" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.223060 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-lpd59"] Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.240705 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kz2xd\" (UniqueName: \"kubernetes.io/projected/59557569-c6d3-4b74-96e0-75c02abea174-kube-api-access-kz2xd\") pod \"nova-api-db-create-s5c2v\" (UID: \"59557569-c6d3-4b74-96e0-75c02abea174\") " pod="openstack/nova-api-db-create-s5c2v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.240782 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59557569-c6d3-4b74-96e0-75c02abea174-operator-scripts\") pod \"nova-api-db-create-s5c2v\" (UID: \"59557569-c6d3-4b74-96e0-75c02abea174\") " pod="openstack/nova-api-db-create-s5c2v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.310106 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-xxwjm"] Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.311575 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xxwjm" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.317689 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-xxwjm"] Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.329444 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-38dc-account-create-bbsjq"] Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.330899 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-38dc-account-create-bbsjq" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.332920 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.342471 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kz2xd\" (UniqueName: \"kubernetes.io/projected/59557569-c6d3-4b74-96e0-75c02abea174-kube-api-access-kz2xd\") pod \"nova-api-db-create-s5c2v\" (UID: \"59557569-c6d3-4b74-96e0-75c02abea174\") " pod="openstack/nova-api-db-create-s5c2v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.342523 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59557569-c6d3-4b74-96e0-75c02abea174-operator-scripts\") pod \"nova-api-db-create-s5c2v\" (UID: \"59557569-c6d3-4b74-96e0-75c02abea174\") " pod="openstack/nova-api-db-create-s5c2v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.342562 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lc4n\" (UniqueName: \"kubernetes.io/projected/a65ba851-c659-4aa1-9db7-716479598c2e-kube-api-access-7lc4n\") pod \"nova-cell0-db-create-lpd59\" (UID: \"a65ba851-c659-4aa1-9db7-716479598c2e\") " pod="openstack/nova-cell0-db-create-lpd59" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.342593 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a65ba851-c659-4aa1-9db7-716479598c2e-operator-scripts\") pod \"nova-cell0-db-create-lpd59\" (UID: \"a65ba851-c659-4aa1-9db7-716479598c2e\") " pod="openstack/nova-cell0-db-create-lpd59" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.343776 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59557569-c6d3-4b74-96e0-75c02abea174-operator-scripts\") pod \"nova-api-db-create-s5c2v\" (UID: \"59557569-c6d3-4b74-96e0-75c02abea174\") " pod="openstack/nova-api-db-create-s5c2v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.356014 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-38dc-account-create-bbsjq"] Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.362694 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kz2xd\" (UniqueName: \"kubernetes.io/projected/59557569-c6d3-4b74-96e0-75c02abea174-kube-api-access-kz2xd\") pod \"nova-api-db-create-s5c2v\" (UID: \"59557569-c6d3-4b74-96e0-75c02abea174\") " pod="openstack/nova-api-db-create-s5c2v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.443598 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxd6k\" (UniqueName: \"kubernetes.io/projected/9d2eab66-4529-4272-a749-f5cda51164e1-kube-api-access-qxd6k\") pod \"nova-api-38dc-account-create-bbsjq\" (UID: \"9d2eab66-4529-4272-a749-f5cda51164e1\") " pod="openstack/nova-api-38dc-account-create-bbsjq" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.443644 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9a0830b-b047-4cc2-aaba-b448f08dc43a-operator-scripts\") pod \"nova-cell1-db-create-xxwjm\" (UID: 
\"b9a0830b-b047-4cc2-aaba-b448f08dc43a\") " pod="openstack/nova-cell1-db-create-xxwjm" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.443794 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlrx8\" (UniqueName: \"kubernetes.io/projected/b9a0830b-b047-4cc2-aaba-b448f08dc43a-kube-api-access-hlrx8\") pod \"nova-cell1-db-create-xxwjm\" (UID: \"b9a0830b-b047-4cc2-aaba-b448f08dc43a\") " pod="openstack/nova-cell1-db-create-xxwjm" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.444015 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d2eab66-4529-4272-a749-f5cda51164e1-operator-scripts\") pod \"nova-api-38dc-account-create-bbsjq\" (UID: \"9d2eab66-4529-4272-a749-f5cda51164e1\") " pod="openstack/nova-api-38dc-account-create-bbsjq" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.444224 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lc4n\" (UniqueName: \"kubernetes.io/projected/a65ba851-c659-4aa1-9db7-716479598c2e-kube-api-access-7lc4n\") pod \"nova-cell0-db-create-lpd59\" (UID: \"a65ba851-c659-4aa1-9db7-716479598c2e\") " pod="openstack/nova-cell0-db-create-lpd59" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.444263 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a65ba851-c659-4aa1-9db7-716479598c2e-operator-scripts\") pod \"nova-cell0-db-create-lpd59\" (UID: \"a65ba851-c659-4aa1-9db7-716479598c2e\") " pod="openstack/nova-cell0-db-create-lpd59" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.445032 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a65ba851-c659-4aa1-9db7-716479598c2e-operator-scripts\") pod \"nova-cell0-db-create-lpd59\" (UID: \"a65ba851-c659-4aa1-9db7-716479598c2e\") " pod="openstack/nova-cell0-db-create-lpd59" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.446883 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-s5c2v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.461341 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lc4n\" (UniqueName: \"kubernetes.io/projected/a65ba851-c659-4aa1-9db7-716479598c2e-kube-api-access-7lc4n\") pod \"nova-cell0-db-create-lpd59\" (UID: \"a65ba851-c659-4aa1-9db7-716479598c2e\") " pod="openstack/nova-cell0-db-create-lpd59" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.521376 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-2e8b-account-create-wl72v"] Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.522945 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-2e8b-account-create-wl72v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.524683 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.538294 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-lpd59" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.543381 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-2e8b-account-create-wl72v"] Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.546490 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlrx8\" (UniqueName: \"kubernetes.io/projected/b9a0830b-b047-4cc2-aaba-b448f08dc43a-kube-api-access-hlrx8\") pod \"nova-cell1-db-create-xxwjm\" (UID: \"b9a0830b-b047-4cc2-aaba-b448f08dc43a\") " pod="openstack/nova-cell1-db-create-xxwjm" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.547102 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d2eab66-4529-4272-a749-f5cda51164e1-operator-scripts\") pod \"nova-api-38dc-account-create-bbsjq\" (UID: \"9d2eab66-4529-4272-a749-f5cda51164e1\") " pod="openstack/nova-api-38dc-account-create-bbsjq" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.547292 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxd6k\" (UniqueName: \"kubernetes.io/projected/9d2eab66-4529-4272-a749-f5cda51164e1-kube-api-access-qxd6k\") pod \"nova-api-38dc-account-create-bbsjq\" (UID: \"9d2eab66-4529-4272-a749-f5cda51164e1\") " pod="openstack/nova-api-38dc-account-create-bbsjq" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.547375 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9a0830b-b047-4cc2-aaba-b448f08dc43a-operator-scripts\") pod \"nova-cell1-db-create-xxwjm\" (UID: \"b9a0830b-b047-4cc2-aaba-b448f08dc43a\") " pod="openstack/nova-cell1-db-create-xxwjm" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.548322 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9a0830b-b047-4cc2-aaba-b448f08dc43a-operator-scripts\") pod \"nova-cell1-db-create-xxwjm\" (UID: \"b9a0830b-b047-4cc2-aaba-b448f08dc43a\") " pod="openstack/nova-cell1-db-create-xxwjm" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.548353 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d2eab66-4529-4272-a749-f5cda51164e1-operator-scripts\") pod \"nova-api-38dc-account-create-bbsjq\" (UID: \"9d2eab66-4529-4272-a749-f5cda51164e1\") " pod="openstack/nova-api-38dc-account-create-bbsjq" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.564700 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxd6k\" (UniqueName: \"kubernetes.io/projected/9d2eab66-4529-4272-a749-f5cda51164e1-kube-api-access-qxd6k\") pod \"nova-api-38dc-account-create-bbsjq\" (UID: \"9d2eab66-4529-4272-a749-f5cda51164e1\") " pod="openstack/nova-api-38dc-account-create-bbsjq" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.566354 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlrx8\" (UniqueName: \"kubernetes.io/projected/b9a0830b-b047-4cc2-aaba-b448f08dc43a-kube-api-access-hlrx8\") pod \"nova-cell1-db-create-xxwjm\" (UID: \"b9a0830b-b047-4cc2-aaba-b448f08dc43a\") " pod="openstack/nova-cell1-db-create-xxwjm" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.627530 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-xxwjm" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.645676 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-38dc-account-create-bbsjq" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.648927 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtd2v\" (UniqueName: \"kubernetes.io/projected/3c24e11c-ddbd-4804-acda-d04dd5e0e799-kube-api-access-vtd2v\") pod \"nova-cell0-2e8b-account-create-wl72v\" (UID: \"3c24e11c-ddbd-4804-acda-d04dd5e0e799\") " pod="openstack/nova-cell0-2e8b-account-create-wl72v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.649016 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c24e11c-ddbd-4804-acda-d04dd5e0e799-operator-scripts\") pod \"nova-cell0-2e8b-account-create-wl72v\" (UID: \"3c24e11c-ddbd-4804-acda-d04dd5e0e799\") " pod="openstack/nova-cell0-2e8b-account-create-wl72v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.726654 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-18af-account-create-n489c"] Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.728004 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-18af-account-create-n489c" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.733555 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.736740 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-18af-account-create-n489c"] Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.751046 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtd2v\" (UniqueName: \"kubernetes.io/projected/3c24e11c-ddbd-4804-acda-d04dd5e0e799-kube-api-access-vtd2v\") pod \"nova-cell0-2e8b-account-create-wl72v\" (UID: \"3c24e11c-ddbd-4804-acda-d04dd5e0e799\") " pod="openstack/nova-cell0-2e8b-account-create-wl72v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.751145 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c24e11c-ddbd-4804-acda-d04dd5e0e799-operator-scripts\") pod \"nova-cell0-2e8b-account-create-wl72v\" (UID: \"3c24e11c-ddbd-4804-acda-d04dd5e0e799\") " pod="openstack/nova-cell0-2e8b-account-create-wl72v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.752111 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c24e11c-ddbd-4804-acda-d04dd5e0e799-operator-scripts\") pod \"nova-cell0-2e8b-account-create-wl72v\" (UID: \"3c24e11c-ddbd-4804-acda-d04dd5e0e799\") " pod="openstack/nova-cell0-2e8b-account-create-wl72v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.777767 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtd2v\" (UniqueName: \"kubernetes.io/projected/3c24e11c-ddbd-4804-acda-d04dd5e0e799-kube-api-access-vtd2v\") pod \"nova-cell0-2e8b-account-create-wl72v\" (UID: \"3c24e11c-ddbd-4804-acda-d04dd5e0e799\") " pod="openstack/nova-cell0-2e8b-account-create-wl72v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.852539 4774 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fda21d9c-81e1-41d8-b983-0e1e46b32bcc-operator-scripts\") pod \"nova-cell1-18af-account-create-n489c\" (UID: \"fda21d9c-81e1-41d8-b983-0e1e46b32bcc\") " pod="openstack/nova-cell1-18af-account-create-n489c" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.852894 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67ztr\" (UniqueName: \"kubernetes.io/projected/fda21d9c-81e1-41d8-b983-0e1e46b32bcc-kube-api-access-67ztr\") pod \"nova-cell1-18af-account-create-n489c\" (UID: \"fda21d9c-81e1-41d8-b983-0e1e46b32bcc\") " pod="openstack/nova-cell1-18af-account-create-n489c" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.857244 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-2e8b-account-create-wl72v" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.948891 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-s5c2v"] Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.954402 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fda21d9c-81e1-41d8-b983-0e1e46b32bcc-operator-scripts\") pod \"nova-cell1-18af-account-create-n489c\" (UID: \"fda21d9c-81e1-41d8-b983-0e1e46b32bcc\") " pod="openstack/nova-cell1-18af-account-create-n489c" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.954608 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67ztr\" (UniqueName: \"kubernetes.io/projected/fda21d9c-81e1-41d8-b983-0e1e46b32bcc-kube-api-access-67ztr\") pod \"nova-cell1-18af-account-create-n489c\" (UID: \"fda21d9c-81e1-41d8-b983-0e1e46b32bcc\") " pod="openstack/nova-cell1-18af-account-create-n489c" Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.955299 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fda21d9c-81e1-41d8-b983-0e1e46b32bcc-operator-scripts\") pod \"nova-cell1-18af-account-create-n489c\" (UID: \"fda21d9c-81e1-41d8-b983-0e1e46b32bcc\") " pod="openstack/nova-cell1-18af-account-create-n489c" Nov 21 15:36:22 crc kubenswrapper[4774]: W1121 15:36:22.963363 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59557569_c6d3_4b74_96e0_75c02abea174.slice/crio-7c7ae3335bd190a2f2b46c86b00210e6192d76832a7008a456c708a337ca3728 WatchSource:0}: Error finding container 7c7ae3335bd190a2f2b46c86b00210e6192d76832a7008a456c708a337ca3728: Status 404 returned error can't find the container with id 7c7ae3335bd190a2f2b46c86b00210e6192d76832a7008a456c708a337ca3728 Nov 21 15:36:22 crc kubenswrapper[4774]: I1121 15:36:22.971560 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67ztr\" (UniqueName: \"kubernetes.io/projected/fda21d9c-81e1-41d8-b983-0e1e46b32bcc-kube-api-access-67ztr\") pod \"nova-cell1-18af-account-create-n489c\" (UID: \"fda21d9c-81e1-41d8-b983-0e1e46b32bcc\") " pod="openstack/nova-cell1-18af-account-create-n489c" Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.080551 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-18af-account-create-n489c" Nov 21 15:36:23 crc kubenswrapper[4774]: W1121 15:36:23.122521 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda65ba851_c659_4aa1_9db7_716479598c2e.slice/crio-bfdf9e18417073427878f1bfbf85f9edb1446eade0f5b0a238cd9e7fd8d9514b WatchSource:0}: Error finding container bfdf9e18417073427878f1bfbf85f9edb1446eade0f5b0a238cd9e7fd8d9514b: Status 404 returned error can't find the container with id bfdf9e18417073427878f1bfbf85f9edb1446eade0f5b0a238cd9e7fd8d9514b Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.124356 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-lpd59"] Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.207156 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-xxwjm"] Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.213207 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-38dc-account-create-bbsjq"] Nov 21 15:36:23 crc kubenswrapper[4774]: W1121 15:36:23.213377 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9a0830b_b047_4cc2_aaba_b448f08dc43a.slice/crio-cdbae314bb51562883dc16f39dd6204fbbb88c64912ac73e455363442391edb9 WatchSource:0}: Error finding container cdbae314bb51562883dc16f39dd6204fbbb88c64912ac73e455363442391edb9: Status 404 returned error can't find the container with id cdbae314bb51562883dc16f39dd6204fbbb88c64912ac73e455363442391edb9 Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.350290 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-2e8b-account-create-wl72v"] Nov 21 15:36:23 crc kubenswrapper[4774]: W1121 15:36:23.408145 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c24e11c_ddbd_4804_acda_d04dd5e0e799.slice/crio-cf7f21b16d234f98ba07263b5eba709a78640de94a0d23dff6e141141156452c WatchSource:0}: Error finding container cf7f21b16d234f98ba07263b5eba709a78640de94a0d23dff6e141141156452c: Status 404 returned error can't find the container with id cf7f21b16d234f98ba07263b5eba709a78640de94a0d23dff6e141141156452c Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.563128 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-18af-account-create-n489c"] Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.788616 4774 generic.go:334] "Generic (PLEG): container finished" podID="59557569-c6d3-4b74-96e0-75c02abea174" containerID="9fb334a0f35bdd6fc020180c71510b22124f3ea7fd0059877bdfa571067f6385" exitCode=0 Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.788668 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-s5c2v" event={"ID":"59557569-c6d3-4b74-96e0-75c02abea174","Type":"ContainerDied","Data":"9fb334a0f35bdd6fc020180c71510b22124f3ea7fd0059877bdfa571067f6385"} Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.788715 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-s5c2v" event={"ID":"59557569-c6d3-4b74-96e0-75c02abea174","Type":"ContainerStarted","Data":"7c7ae3335bd190a2f2b46c86b00210e6192d76832a7008a456c708a337ca3728"} Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.790885 4774 generic.go:334] "Generic (PLEG): container finished" 
podID="9d2eab66-4529-4272-a749-f5cda51164e1" containerID="51405d86b51a3a16331046fbd5fa8b0fbfeda7f04fd8b713594c37ba79d521bf" exitCode=0 Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.790946 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-38dc-account-create-bbsjq" event={"ID":"9d2eab66-4529-4272-a749-f5cda51164e1","Type":"ContainerDied","Data":"51405d86b51a3a16331046fbd5fa8b0fbfeda7f04fd8b713594c37ba79d521bf"} Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.790966 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-38dc-account-create-bbsjq" event={"ID":"9d2eab66-4529-4272-a749-f5cda51164e1","Type":"ContainerStarted","Data":"b64f6be44999a82ff4bfed09a906c95401595fb3443bb30d2d86634c2ab991e7"} Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.792176 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-18af-account-create-n489c" event={"ID":"fda21d9c-81e1-41d8-b983-0e1e46b32bcc","Type":"ContainerStarted","Data":"d4672428c69448025dbb9bf07861ab506fde16f5141e0945a18f46ad018cef94"} Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.793591 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-2e8b-account-create-wl72v" event={"ID":"3c24e11c-ddbd-4804-acda-d04dd5e0e799","Type":"ContainerStarted","Data":"cf7f21b16d234f98ba07263b5eba709a78640de94a0d23dff6e141141156452c"} Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.795027 4774 generic.go:334] "Generic (PLEG): container finished" podID="b9a0830b-b047-4cc2-aaba-b448f08dc43a" containerID="8d96762923bd79d773d80ca706388011cb4ecf7f05241e5751b3db099fcf4b39" exitCode=0 Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.795097 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xxwjm" event={"ID":"b9a0830b-b047-4cc2-aaba-b448f08dc43a","Type":"ContainerDied","Data":"8d96762923bd79d773d80ca706388011cb4ecf7f05241e5751b3db099fcf4b39"} Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.795119 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xxwjm" event={"ID":"b9a0830b-b047-4cc2-aaba-b448f08dc43a","Type":"ContainerStarted","Data":"cdbae314bb51562883dc16f39dd6204fbbb88c64912ac73e455363442391edb9"} Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.798392 4774 generic.go:334] "Generic (PLEG): container finished" podID="a65ba851-c659-4aa1-9db7-716479598c2e" containerID="2bf651a832617b45579b6e6855693da67ad0b4ba7bbcdebdd074b46dd9b0c2d0" exitCode=0 Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.798435 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-lpd59" event={"ID":"a65ba851-c659-4aa1-9db7-716479598c2e","Type":"ContainerDied","Data":"2bf651a832617b45579b6e6855693da67ad0b4ba7bbcdebdd074b46dd9b0c2d0"} Nov 21 15:36:23 crc kubenswrapper[4774]: I1121 15:36:23.798459 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-lpd59" event={"ID":"a65ba851-c659-4aa1-9db7-716479598c2e","Type":"ContainerStarted","Data":"bfdf9e18417073427878f1bfbf85f9edb1446eade0f5b0a238cd9e7fd8d9514b"} Nov 21 15:36:24 crc kubenswrapper[4774]: I1121 15:36:24.808160 4774 generic.go:334] "Generic (PLEG): container finished" podID="fda21d9c-81e1-41d8-b983-0e1e46b32bcc" containerID="62d827c975db7c505c288ac0928199bf72d5dede89475bc4a849187b7e66e913" exitCode=0 Nov 21 15:36:24 crc kubenswrapper[4774]: I1121 15:36:24.808245 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-18af-account-create-n489c" event={"ID":"fda21d9c-81e1-41d8-b983-0e1e46b32bcc","Type":"ContainerDied","Data":"62d827c975db7c505c288ac0928199bf72d5dede89475bc4a849187b7e66e913"} Nov 21 15:36:24 crc kubenswrapper[4774]: I1121 15:36:24.810285 4774 generic.go:334] "Generic (PLEG): container finished" podID="3c24e11c-ddbd-4804-acda-d04dd5e0e799" containerID="d799e91e7cedd65e12addf4ad46a078b0f63dfc609abfe23d6cfaadd513822f7" exitCode=0 Nov 21 15:36:24 crc kubenswrapper[4774]: I1121 15:36:24.810541 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-2e8b-account-create-wl72v" event={"ID":"3c24e11c-ddbd-4804-acda-d04dd5e0e799","Type":"ContainerDied","Data":"d799e91e7cedd65e12addf4ad46a078b0f63dfc609abfe23d6cfaadd513822f7"} Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.187933 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-lpd59" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.298601 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lc4n\" (UniqueName: \"kubernetes.io/projected/a65ba851-c659-4aa1-9db7-716479598c2e-kube-api-access-7lc4n\") pod \"a65ba851-c659-4aa1-9db7-716479598c2e\" (UID: \"a65ba851-c659-4aa1-9db7-716479598c2e\") " Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.298661 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a65ba851-c659-4aa1-9db7-716479598c2e-operator-scripts\") pod \"a65ba851-c659-4aa1-9db7-716479598c2e\" (UID: \"a65ba851-c659-4aa1-9db7-716479598c2e\") " Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.299643 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a65ba851-c659-4aa1-9db7-716479598c2e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a65ba851-c659-4aa1-9db7-716479598c2e" (UID: "a65ba851-c659-4aa1-9db7-716479598c2e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.306455 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a65ba851-c659-4aa1-9db7-716479598c2e-kube-api-access-7lc4n" (OuterVolumeSpecName: "kube-api-access-7lc4n") pod "a65ba851-c659-4aa1-9db7-716479598c2e" (UID: "a65ba851-c659-4aa1-9db7-716479598c2e"). InnerVolumeSpecName "kube-api-access-7lc4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.357745 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-s5c2v" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.368226 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-38dc-account-create-bbsjq" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.376553 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-xxwjm" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.401018 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lc4n\" (UniqueName: \"kubernetes.io/projected/a65ba851-c659-4aa1-9db7-716479598c2e-kube-api-access-7lc4n\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.401057 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a65ba851-c659-4aa1-9db7-716479598c2e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.502176 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d2eab66-4529-4272-a749-f5cda51164e1-operator-scripts\") pod \"9d2eab66-4529-4272-a749-f5cda51164e1\" (UID: \"9d2eab66-4529-4272-a749-f5cda51164e1\") " Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.502314 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kz2xd\" (UniqueName: \"kubernetes.io/projected/59557569-c6d3-4b74-96e0-75c02abea174-kube-api-access-kz2xd\") pod \"59557569-c6d3-4b74-96e0-75c02abea174\" (UID: \"59557569-c6d3-4b74-96e0-75c02abea174\") " Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.502361 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlrx8\" (UniqueName: \"kubernetes.io/projected/b9a0830b-b047-4cc2-aaba-b448f08dc43a-kube-api-access-hlrx8\") pod \"b9a0830b-b047-4cc2-aaba-b448f08dc43a\" (UID: \"b9a0830b-b047-4cc2-aaba-b448f08dc43a\") " Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.502437 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9a0830b-b047-4cc2-aaba-b448f08dc43a-operator-scripts\") pod \"b9a0830b-b047-4cc2-aaba-b448f08dc43a\" (UID: \"b9a0830b-b047-4cc2-aaba-b448f08dc43a\") " Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.502518 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59557569-c6d3-4b74-96e0-75c02abea174-operator-scripts\") pod \"59557569-c6d3-4b74-96e0-75c02abea174\" (UID: \"59557569-c6d3-4b74-96e0-75c02abea174\") " Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.502562 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxd6k\" (UniqueName: \"kubernetes.io/projected/9d2eab66-4529-4272-a749-f5cda51164e1-kube-api-access-qxd6k\") pod \"9d2eab66-4529-4272-a749-f5cda51164e1\" (UID: \"9d2eab66-4529-4272-a749-f5cda51164e1\") " Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.502967 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d2eab66-4529-4272-a749-f5cda51164e1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9d2eab66-4529-4272-a749-f5cda51164e1" (UID: "9d2eab66-4529-4272-a749-f5cda51164e1"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.503262 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9a0830b-b047-4cc2-aaba-b448f08dc43a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b9a0830b-b047-4cc2-aaba-b448f08dc43a" (UID: "b9a0830b-b047-4cc2-aaba-b448f08dc43a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.503264 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/59557569-c6d3-4b74-96e0-75c02abea174-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "59557569-c6d3-4b74-96e0-75c02abea174" (UID: "59557569-c6d3-4b74-96e0-75c02abea174"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.503528 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9a0830b-b047-4cc2-aaba-b448f08dc43a-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.503548 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d2eab66-4529-4272-a749-f5cda51164e1-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.505796 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59557569-c6d3-4b74-96e0-75c02abea174-kube-api-access-kz2xd" (OuterVolumeSpecName: "kube-api-access-kz2xd") pod "59557569-c6d3-4b74-96e0-75c02abea174" (UID: "59557569-c6d3-4b74-96e0-75c02abea174"). InnerVolumeSpecName "kube-api-access-kz2xd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.505891 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d2eab66-4529-4272-a749-f5cda51164e1-kube-api-access-qxd6k" (OuterVolumeSpecName: "kube-api-access-qxd6k") pod "9d2eab66-4529-4272-a749-f5cda51164e1" (UID: "9d2eab66-4529-4272-a749-f5cda51164e1"). InnerVolumeSpecName "kube-api-access-qxd6k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.506631 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9a0830b-b047-4cc2-aaba-b448f08dc43a-kube-api-access-hlrx8" (OuterVolumeSpecName: "kube-api-access-hlrx8") pod "b9a0830b-b047-4cc2-aaba-b448f08dc43a" (UID: "b9a0830b-b047-4cc2-aaba-b448f08dc43a"). InnerVolumeSpecName "kube-api-access-hlrx8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.605740 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlrx8\" (UniqueName: \"kubernetes.io/projected/b9a0830b-b047-4cc2-aaba-b448f08dc43a-kube-api-access-hlrx8\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.605803 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/59557569-c6d3-4b74-96e0-75c02abea174-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.605848 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxd6k\" (UniqueName: \"kubernetes.io/projected/9d2eab66-4529-4272-a749-f5cda51164e1-kube-api-access-qxd6k\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.605869 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kz2xd\" (UniqueName: \"kubernetes.io/projected/59557569-c6d3-4b74-96e0-75c02abea174-kube-api-access-kz2xd\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.824795 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-38dc-account-create-bbsjq" event={"ID":"9d2eab66-4529-4272-a749-f5cda51164e1","Type":"ContainerDied","Data":"b64f6be44999a82ff4bfed09a906c95401595fb3443bb30d2d86634c2ab991e7"} Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.825146 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b64f6be44999a82ff4bfed09a906c95401595fb3443bb30d2d86634c2ab991e7" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.824849 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-38dc-account-create-bbsjq" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.829916 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xxwjm" event={"ID":"b9a0830b-b047-4cc2-aaba-b448f08dc43a","Type":"ContainerDied","Data":"cdbae314bb51562883dc16f39dd6204fbbb88c64912ac73e455363442391edb9"} Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.829981 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cdbae314bb51562883dc16f39dd6204fbbb88c64912ac73e455363442391edb9" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.830016 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xxwjm" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.832759 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-lpd59" event={"ID":"a65ba851-c659-4aa1-9db7-716479598c2e","Type":"ContainerDied","Data":"bfdf9e18417073427878f1bfbf85f9edb1446eade0f5b0a238cd9e7fd8d9514b"} Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.832837 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bfdf9e18417073427878f1bfbf85f9edb1446eade0f5b0a238cd9e7fd8d9514b" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.832882 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-lpd59" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.835775 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-s5c2v" event={"ID":"59557569-c6d3-4b74-96e0-75c02abea174","Type":"ContainerDied","Data":"7c7ae3335bd190a2f2b46c86b00210e6192d76832a7008a456c708a337ca3728"} Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.835866 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c7ae3335bd190a2f2b46c86b00210e6192d76832a7008a456c708a337ca3728" Nov 21 15:36:25 crc kubenswrapper[4774]: I1121 15:36:25.836141 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-s5c2v" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.232606 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-18af-account-create-n489c" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.240683 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-2e8b-account-create-wl72v" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.421609 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtd2v\" (UniqueName: \"kubernetes.io/projected/3c24e11c-ddbd-4804-acda-d04dd5e0e799-kube-api-access-vtd2v\") pod \"3c24e11c-ddbd-4804-acda-d04dd5e0e799\" (UID: \"3c24e11c-ddbd-4804-acda-d04dd5e0e799\") " Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.421693 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fda21d9c-81e1-41d8-b983-0e1e46b32bcc-operator-scripts\") pod \"fda21d9c-81e1-41d8-b983-0e1e46b32bcc\" (UID: \"fda21d9c-81e1-41d8-b983-0e1e46b32bcc\") " Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.421761 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67ztr\" (UniqueName: \"kubernetes.io/projected/fda21d9c-81e1-41d8-b983-0e1e46b32bcc-kube-api-access-67ztr\") pod \"fda21d9c-81e1-41d8-b983-0e1e46b32bcc\" (UID: \"fda21d9c-81e1-41d8-b983-0e1e46b32bcc\") " Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.421792 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c24e11c-ddbd-4804-acda-d04dd5e0e799-operator-scripts\") pod \"3c24e11c-ddbd-4804-acda-d04dd5e0e799\" (UID: \"3c24e11c-ddbd-4804-acda-d04dd5e0e799\") " Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.422516 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c24e11c-ddbd-4804-acda-d04dd5e0e799-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3c24e11c-ddbd-4804-acda-d04dd5e0e799" (UID: "3c24e11c-ddbd-4804-acda-d04dd5e0e799"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.422603 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda21d9c-81e1-41d8-b983-0e1e46b32bcc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fda21d9c-81e1-41d8-b983-0e1e46b32bcc" (UID: "fda21d9c-81e1-41d8-b983-0e1e46b32bcc"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.425255 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda21d9c-81e1-41d8-b983-0e1e46b32bcc-kube-api-access-67ztr" (OuterVolumeSpecName: "kube-api-access-67ztr") pod "fda21d9c-81e1-41d8-b983-0e1e46b32bcc" (UID: "fda21d9c-81e1-41d8-b983-0e1e46b32bcc"). InnerVolumeSpecName "kube-api-access-67ztr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.425356 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c24e11c-ddbd-4804-acda-d04dd5e0e799-kube-api-access-vtd2v" (OuterVolumeSpecName: "kube-api-access-vtd2v") pod "3c24e11c-ddbd-4804-acda-d04dd5e0e799" (UID: "3c24e11c-ddbd-4804-acda-d04dd5e0e799"). InnerVolumeSpecName "kube-api-access-vtd2v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.523894 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtd2v\" (UniqueName: \"kubernetes.io/projected/3c24e11c-ddbd-4804-acda-d04dd5e0e799-kube-api-access-vtd2v\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.523940 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fda21d9c-81e1-41d8-b983-0e1e46b32bcc-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.523950 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67ztr\" (UniqueName: \"kubernetes.io/projected/fda21d9c-81e1-41d8-b983-0e1e46b32bcc-kube-api-access-67ztr\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.523961 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c24e11c-ddbd-4804-acda-d04dd5e0e799-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.849955 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-18af-account-create-n489c" event={"ID":"fda21d9c-81e1-41d8-b983-0e1e46b32bcc","Type":"ContainerDied","Data":"d4672428c69448025dbb9bf07861ab506fde16f5141e0945a18f46ad018cef94"} Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.850048 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4672428c69448025dbb9bf07861ab506fde16f5141e0945a18f46ad018cef94" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.849986 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-18af-account-create-n489c" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.852332 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-2e8b-account-create-wl72v" event={"ID":"3c24e11c-ddbd-4804-acda-d04dd5e0e799","Type":"ContainerDied","Data":"cf7f21b16d234f98ba07263b5eba709a78640de94a0d23dff6e141141156452c"} Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.852379 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf7f21b16d234f98ba07263b5eba709a78640de94a0d23dff6e141141156452c" Nov 21 15:36:26 crc kubenswrapper[4774]: I1121 15:36:26.852399 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-2e8b-account-create-wl72v" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.776364 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c2vlc"] Nov 21 15:36:27 crc kubenswrapper[4774]: E1121 15:36:27.777113 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c24e11c-ddbd-4804-acda-d04dd5e0e799" containerName="mariadb-account-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.777133 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c24e11c-ddbd-4804-acda-d04dd5e0e799" containerName="mariadb-account-create" Nov 21 15:36:27 crc kubenswrapper[4774]: E1121 15:36:27.777155 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fda21d9c-81e1-41d8-b983-0e1e46b32bcc" containerName="mariadb-account-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.777162 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fda21d9c-81e1-41d8-b983-0e1e46b32bcc" containerName="mariadb-account-create" Nov 21 15:36:27 crc kubenswrapper[4774]: E1121 15:36:27.777182 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d2eab66-4529-4272-a749-f5cda51164e1" containerName="mariadb-account-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.777190 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d2eab66-4529-4272-a749-f5cda51164e1" containerName="mariadb-account-create" Nov 21 15:36:27 crc kubenswrapper[4774]: E1121 15:36:27.777199 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a65ba851-c659-4aa1-9db7-716479598c2e" containerName="mariadb-database-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.777206 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a65ba851-c659-4aa1-9db7-716479598c2e" containerName="mariadb-database-create" Nov 21 15:36:27 crc kubenswrapper[4774]: E1121 15:36:27.777221 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9a0830b-b047-4cc2-aaba-b448f08dc43a" containerName="mariadb-database-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.777228 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9a0830b-b047-4cc2-aaba-b448f08dc43a" containerName="mariadb-database-create" Nov 21 15:36:27 crc kubenswrapper[4774]: E1121 15:36:27.777249 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59557569-c6d3-4b74-96e0-75c02abea174" containerName="mariadb-database-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.777260 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="59557569-c6d3-4b74-96e0-75c02abea174" containerName="mariadb-database-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.777456 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d2eab66-4529-4272-a749-f5cda51164e1" containerName="mariadb-account-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.777468 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9a0830b-b047-4cc2-aaba-b448f08dc43a" containerName="mariadb-database-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.777487 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="fda21d9c-81e1-41d8-b983-0e1e46b32bcc" containerName="mariadb-account-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.777502 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="59557569-c6d3-4b74-96e0-75c02abea174" 
containerName="mariadb-database-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.777512 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a65ba851-c659-4aa1-9db7-716479598c2e" containerName="mariadb-database-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.777524 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c24e11c-ddbd-4804-acda-d04dd5e0e799" containerName="mariadb-account-create" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.778271 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.780275 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.781241 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.781420 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-xrrpv" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.816086 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c2vlc"] Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.950138 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-config-data\") pod \"nova-cell0-conductor-db-sync-c2vlc\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.950206 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-scripts\") pod \"nova-cell0-conductor-db-sync-c2vlc\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.950307 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrhkj\" (UniqueName: \"kubernetes.io/projected/ef417cce-2b05-45db-86b0-0b8b907690b4-kube-api-access-jrhkj\") pod \"nova-cell0-conductor-db-sync-c2vlc\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:27 crc kubenswrapper[4774]: I1121 15:36:27.950392 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-c2vlc\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.051741 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-config-data\") pod \"nova-cell0-conductor-db-sync-c2vlc\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.051790 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-scripts\") pod \"nova-cell0-conductor-db-sync-c2vlc\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.051870 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrhkj\" (UniqueName: \"kubernetes.io/projected/ef417cce-2b05-45db-86b0-0b8b907690b4-kube-api-access-jrhkj\") pod \"nova-cell0-conductor-db-sync-c2vlc\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.051903 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-c2vlc\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.056752 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-scripts\") pod \"nova-cell0-conductor-db-sync-c2vlc\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.057039 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-c2vlc\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.058715 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-config-data\") pod \"nova-cell0-conductor-db-sync-c2vlc\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.069951 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrhkj\" (UniqueName: \"kubernetes.io/projected/ef417cce-2b05-45db-86b0-0b8b907690b4-kube-api-access-jrhkj\") pod \"nova-cell0-conductor-db-sync-c2vlc\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.116727 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.383059 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c2vlc"] Nov 21 15:36:28 crc kubenswrapper[4774]: W1121 15:36:28.386408 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef417cce_2b05_45db_86b0_0b8b907690b4.slice/crio-082ccfefba5f7503f54542688e19ec8e332d9c117b9fc527491ae1951a31e652 WatchSource:0}: Error finding container 082ccfefba5f7503f54542688e19ec8e332d9c117b9fc527491ae1951a31e652: Status 404 returned error can't find the container with id 082ccfefba5f7503f54542688e19ec8e332d9c117b9fc527491ae1951a31e652 Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.880840 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-c2vlc" event={"ID":"ef417cce-2b05-45db-86b0-0b8b907690b4","Type":"ContainerStarted","Data":"c2b4ba0ca7b68d0812924bad644b73737282143d85f988987606b0d42a11e26a"} Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.881137 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-c2vlc" event={"ID":"ef417cce-2b05-45db-86b0-0b8b907690b4","Type":"ContainerStarted","Data":"082ccfefba5f7503f54542688e19ec8e332d9c117b9fc527491ae1951a31e652"} Nov 21 15:36:28 crc kubenswrapper[4774]: I1121 15:36:28.906068 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-c2vlc" podStartSLOduration=1.906036694 podStartE2EDuration="1.906036694s" podCreationTimestamp="2025-11-21 15:36:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:36:28.894722741 +0000 UTC m=+5579.546922000" watchObservedRunningTime="2025-11-21 15:36:28.906036694 +0000 UTC m=+5579.558235983" Nov 21 15:36:29 crc kubenswrapper[4774]: E1121 15:36:29.414586 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c58e79c_d93c_43d1_bc52_d7ba1de82482.slice/crio-643da8e3d51164a44d4343fcb31d799099c9ffabafb77e23ac408b984700b704\": RecentStats: unable to find data in memory cache]" Nov 21 15:36:29 crc kubenswrapper[4774]: I1121 15:36:29.601179 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:36:29 crc kubenswrapper[4774]: I1121 15:36:29.601247 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:36:34 crc kubenswrapper[4774]: I1121 15:36:34.936146 4774 generic.go:334] "Generic (PLEG): container finished" podID="ef417cce-2b05-45db-86b0-0b8b907690b4" 
containerID="c2b4ba0ca7b68d0812924bad644b73737282143d85f988987606b0d42a11e26a" exitCode=0 Nov 21 15:36:34 crc kubenswrapper[4774]: I1121 15:36:34.936177 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-c2vlc" event={"ID":"ef417cce-2b05-45db-86b0-0b8b907690b4","Type":"ContainerDied","Data":"c2b4ba0ca7b68d0812924bad644b73737282143d85f988987606b0d42a11e26a"} Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.232425 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.302176 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-scripts\") pod \"ef417cce-2b05-45db-86b0-0b8b907690b4\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.302573 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-config-data\") pod \"ef417cce-2b05-45db-86b0-0b8b907690b4\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.307777 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-scripts" (OuterVolumeSpecName: "scripts") pod "ef417cce-2b05-45db-86b0-0b8b907690b4" (UID: "ef417cce-2b05-45db-86b0-0b8b907690b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.330526 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-config-data" (OuterVolumeSpecName: "config-data") pod "ef417cce-2b05-45db-86b0-0b8b907690b4" (UID: "ef417cce-2b05-45db-86b0-0b8b907690b4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.405189 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-combined-ca-bundle\") pod \"ef417cce-2b05-45db-86b0-0b8b907690b4\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.405366 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrhkj\" (UniqueName: \"kubernetes.io/projected/ef417cce-2b05-45db-86b0-0b8b907690b4-kube-api-access-jrhkj\") pod \"ef417cce-2b05-45db-86b0-0b8b907690b4\" (UID: \"ef417cce-2b05-45db-86b0-0b8b907690b4\") " Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.405920 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.405949 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.407981 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef417cce-2b05-45db-86b0-0b8b907690b4-kube-api-access-jrhkj" (OuterVolumeSpecName: "kube-api-access-jrhkj") pod "ef417cce-2b05-45db-86b0-0b8b907690b4" (UID: "ef417cce-2b05-45db-86b0-0b8b907690b4"). InnerVolumeSpecName "kube-api-access-jrhkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.431949 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef417cce-2b05-45db-86b0-0b8b907690b4" (UID: "ef417cce-2b05-45db-86b0-0b8b907690b4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.507016 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrhkj\" (UniqueName: \"kubernetes.io/projected/ef417cce-2b05-45db-86b0-0b8b907690b4-kube-api-access-jrhkj\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.507043 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef417cce-2b05-45db-86b0-0b8b907690b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.964067 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-c2vlc" event={"ID":"ef417cce-2b05-45db-86b0-0b8b907690b4","Type":"ContainerDied","Data":"082ccfefba5f7503f54542688e19ec8e332d9c117b9fc527491ae1951a31e652"} Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.964489 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="082ccfefba5f7503f54542688e19ec8e332d9c117b9fc527491ae1951a31e652" Nov 21 15:36:36 crc kubenswrapper[4774]: I1121 15:36:36.964237 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-c2vlc" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.042411 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 15:36:37 crc kubenswrapper[4774]: E1121 15:36:37.042936 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef417cce-2b05-45db-86b0-0b8b907690b4" containerName="nova-cell0-conductor-db-sync" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.042960 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef417cce-2b05-45db-86b0-0b8b907690b4" containerName="nova-cell0-conductor-db-sync" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.043183 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef417cce-2b05-45db-86b0-0b8b907690b4" containerName="nova-cell0-conductor-db-sync" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.043970 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.047427 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.047601 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-xrrpv" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.053172 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.117753 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ab3689-3218-47c5-a72c-7e187b48fc37-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a9ab3689-3218-47c5-a72c-7e187b48fc37\") " pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.117831 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6fv8\" (UniqueName: \"kubernetes.io/projected/a9ab3689-3218-47c5-a72c-7e187b48fc37-kube-api-access-s6fv8\") pod \"nova-cell0-conductor-0\" (UID: \"a9ab3689-3218-47c5-a72c-7e187b48fc37\") " pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.117879 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9ab3689-3218-47c5-a72c-7e187b48fc37-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a9ab3689-3218-47c5-a72c-7e187b48fc37\") " pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.220026 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ab3689-3218-47c5-a72c-7e187b48fc37-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a9ab3689-3218-47c5-a72c-7e187b48fc37\") " pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.220077 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6fv8\" (UniqueName: \"kubernetes.io/projected/a9ab3689-3218-47c5-a72c-7e187b48fc37-kube-api-access-s6fv8\") pod \"nova-cell0-conductor-0\" (UID: \"a9ab3689-3218-47c5-a72c-7e187b48fc37\") " pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:37 crc 
kubenswrapper[4774]: I1121 15:36:37.220131 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9ab3689-3218-47c5-a72c-7e187b48fc37-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a9ab3689-3218-47c5-a72c-7e187b48fc37\") " pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.225275 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9ab3689-3218-47c5-a72c-7e187b48fc37-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a9ab3689-3218-47c5-a72c-7e187b48fc37\") " pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.231512 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ab3689-3218-47c5-a72c-7e187b48fc37-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a9ab3689-3218-47c5-a72c-7e187b48fc37\") " pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.236365 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6fv8\" (UniqueName: \"kubernetes.io/projected/a9ab3689-3218-47c5-a72c-7e187b48fc37-kube-api-access-s6fv8\") pod \"nova-cell0-conductor-0\" (UID: \"a9ab3689-3218-47c5-a72c-7e187b48fc37\") " pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.432385 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.878235 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 15:36:37 crc kubenswrapper[4774]: W1121 15:36:37.881687 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9ab3689_3218_47c5_a72c_7e187b48fc37.slice/crio-3b5d0119d9b19c292495a4e1650291fe70302eedf253883f4ed1871fd1f906fc WatchSource:0}: Error finding container 3b5d0119d9b19c292495a4e1650291fe70302eedf253883f4ed1871fd1f906fc: Status 404 returned error can't find the container with id 3b5d0119d9b19c292495a4e1650291fe70302eedf253883f4ed1871fd1f906fc Nov 21 15:36:37 crc kubenswrapper[4774]: I1121 15:36:37.974259 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a9ab3689-3218-47c5-a72c-7e187b48fc37","Type":"ContainerStarted","Data":"3b5d0119d9b19c292495a4e1650291fe70302eedf253883f4ed1871fd1f906fc"} Nov 21 15:36:38 crc kubenswrapper[4774]: I1121 15:36:38.999261 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a9ab3689-3218-47c5-a72c-7e187b48fc37","Type":"ContainerStarted","Data":"7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b"} Nov 21 15:36:39 crc kubenswrapper[4774]: I1121 15:36:38.999844 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:39 crc kubenswrapper[4774]: I1121 15:36:39.034564 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.034541981 podStartE2EDuration="2.034541981s" podCreationTimestamp="2025-11-21 15:36:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-11-21 15:36:39.014742446 +0000 UTC m=+5589.666941715" watchObservedRunningTime="2025-11-21 15:36:39.034541981 +0000 UTC m=+5589.686741240" Nov 21 15:36:40 crc kubenswrapper[4774]: I1121 15:36:40.757425 4774 scope.go:117] "RemoveContainer" containerID="08dd28986e7b778398e849af0aa12723664c49d426f641f8fafb397674009f41" Nov 21 15:36:47 crc kubenswrapper[4774]: I1121 15:36:47.464898 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 21 15:36:47 crc kubenswrapper[4774]: I1121 15:36:47.966396 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-qpm97"] Nov 21 15:36:47 crc kubenswrapper[4774]: I1121 15:36:47.968438 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:47 crc kubenswrapper[4774]: I1121 15:36:47.974725 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-qpm97"] Nov 21 15:36:47 crc kubenswrapper[4774]: I1121 15:36:47.975608 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Nov 21 15:36:47 crc kubenswrapper[4774]: I1121 15:36:47.987576 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.041853 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-config-data\") pod \"nova-cell0-cell-mapping-qpm97\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.042013 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr2wc\" (UniqueName: \"kubernetes.io/projected/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-kube-api-access-dr2wc\") pod \"nova-cell0-cell-mapping-qpm97\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.042075 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-scripts\") pod \"nova-cell0-cell-mapping-qpm97\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.042104 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-qpm97\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.063732 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.065138 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.068699 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.082301 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.146543 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d584d217-33a3-49fb-bc49-7a41563768fc-config-data\") pod \"nova-scheduler-0\" (UID: \"d584d217-33a3-49fb-bc49-7a41563768fc\") " pod="openstack/nova-scheduler-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.146618 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcf6s\" (UniqueName: \"kubernetes.io/projected/d584d217-33a3-49fb-bc49-7a41563768fc-kube-api-access-fcf6s\") pod \"nova-scheduler-0\" (UID: \"d584d217-33a3-49fb-bc49-7a41563768fc\") " pod="openstack/nova-scheduler-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.146664 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-config-data\") pod \"nova-cell0-cell-mapping-qpm97\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.146709 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d584d217-33a3-49fb-bc49-7a41563768fc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d584d217-33a3-49fb-bc49-7a41563768fc\") " pod="openstack/nova-scheduler-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.146751 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr2wc\" (UniqueName: \"kubernetes.io/projected/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-kube-api-access-dr2wc\") pod \"nova-cell0-cell-mapping-qpm97\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.146808 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-scripts\") pod \"nova-cell0-cell-mapping-qpm97\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.146851 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-qpm97\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.151117 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.153065 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.156026 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-qpm97\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.164286 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.164639 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-config-data\") pod \"nova-cell0-cell-mapping-qpm97\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.165503 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-scripts\") pod \"nova-cell0-cell-mapping-qpm97\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.174261 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.192153 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dr2wc\" (UniqueName: \"kubernetes.io/projected/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-kube-api-access-dr2wc\") pod \"nova-cell0-cell-mapping-qpm97\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.269665 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.276178 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.287904 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e00c2f7-6fd0-4422-9896-1e1764281b0c-logs\") pod \"nova-metadata-0\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.288253 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e00c2f7-6fd0-4422-9896-1e1764281b0c-config-data\") pod \"nova-metadata-0\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.288400 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d584d217-33a3-49fb-bc49-7a41563768fc-config-data\") pod \"nova-scheduler-0\" (UID: \"d584d217-33a3-49fb-bc49-7a41563768fc\") " pod="openstack/nova-scheduler-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.296309 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.298011 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.300079 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcf6s\" (UniqueName: \"kubernetes.io/projected/d584d217-33a3-49fb-bc49-7a41563768fc-kube-api-access-fcf6s\") pod \"nova-scheduler-0\" (UID: \"d584d217-33a3-49fb-bc49-7a41563768fc\") " pod="openstack/nova-scheduler-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.300165 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e00c2f7-6fd0-4422-9896-1e1764281b0c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.300354 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d584d217-33a3-49fb-bc49-7a41563768fc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d584d217-33a3-49fb-bc49-7a41563768fc\") " pod="openstack/nova-scheduler-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.300925 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gg8b\" (UniqueName: \"kubernetes.io/projected/6e00c2f7-6fd0-4422-9896-1e1764281b0c-kube-api-access-5gg8b\") pod \"nova-metadata-0\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.313556 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d584d217-33a3-49fb-bc49-7a41563768fc-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d584d217-33a3-49fb-bc49-7a41563768fc\") " pod="openstack/nova-scheduler-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.315405 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/d584d217-33a3-49fb-bc49-7a41563768fc-config-data\") pod \"nova-scheduler-0\" (UID: \"d584d217-33a3-49fb-bc49-7a41563768fc\") " pod="openstack/nova-scheduler-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.341176 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcf6s\" (UniqueName: \"kubernetes.io/projected/d584d217-33a3-49fb-bc49-7a41563768fc-kube-api-access-fcf6s\") pod \"nova-scheduler-0\" (UID: \"d584d217-33a3-49fb-bc49-7a41563768fc\") " pod="openstack/nova-scheduler-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.384350 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.387261 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.400173 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.402308 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.403735 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.406913 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc385c54-02aa-4582-924c-3bc67c99b870-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dc385c54-02aa-4582-924c-3bc67c99b870\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.406974 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e00c2f7-6fd0-4422-9896-1e1764281b0c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.407034 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86ztl\" (UniqueName: \"kubernetes.io/projected/dc385c54-02aa-4582-924c-3bc67c99b870-kube-api-access-86ztl\") pod \"nova-cell1-novncproxy-0\" (UID: \"dc385c54-02aa-4582-924c-3bc67c99b870\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.407078 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gg8b\" (UniqueName: \"kubernetes.io/projected/6e00c2f7-6fd0-4422-9896-1e1764281b0c-kube-api-access-5gg8b\") pod \"nova-metadata-0\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.407134 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc385c54-02aa-4582-924c-3bc67c99b870-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dc385c54-02aa-4582-924c-3bc67c99b870\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.407191 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/6e00c2f7-6fd0-4422-9896-1e1764281b0c-logs\") pod \"nova-metadata-0\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.407292 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e00c2f7-6fd0-4422-9896-1e1764281b0c-config-data\") pod \"nova-metadata-0\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.408317 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e00c2f7-6fd0-4422-9896-1e1764281b0c-logs\") pod \"nova-metadata-0\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.419645 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e00c2f7-6fd0-4422-9896-1e1764281b0c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.421669 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e00c2f7-6fd0-4422-9896-1e1764281b0c-config-data\") pod \"nova-metadata-0\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.429254 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.433279 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gg8b\" (UniqueName: \"kubernetes.io/projected/6e00c2f7-6fd0-4422-9896-1e1764281b0c-kube-api-access-5gg8b\") pod \"nova-metadata-0\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.451890 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c6f5885c9-r95pq"] Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.454424 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.469924 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6f5885c9-r95pq"] Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.509036 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4432896-ed78-43ff-83fa-daf5ba668923-config-data\") pod \"nova-api-0\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.509092 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.509165 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc385c54-02aa-4582-924c-3bc67c99b870-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dc385c54-02aa-4582-924c-3bc67c99b870\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.509188 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4432896-ed78-43ff-83fa-daf5ba668923-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.509215 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.509248 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-dns-svc\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.509479 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-config\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.509547 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwfcn\" (UniqueName: \"kubernetes.io/projected/c4432896-ed78-43ff-83fa-daf5ba668923-kube-api-access-hwfcn\") pod \"nova-api-0\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.509576 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkqwf\" (UniqueName: 
\"kubernetes.io/projected/dc14b10f-4cd2-4747-9137-39fb3f97ead4-kube-api-access-pkqwf\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.509623 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc385c54-02aa-4582-924c-3bc67c99b870-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dc385c54-02aa-4582-924c-3bc67c99b870\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.509650 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4432896-ed78-43ff-83fa-daf5ba668923-logs\") pod \"nova-api-0\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.509960 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86ztl\" (UniqueName: \"kubernetes.io/projected/dc385c54-02aa-4582-924c-3bc67c99b870-kube-api-access-86ztl\") pod \"nova-cell1-novncproxy-0\" (UID: \"dc385c54-02aa-4582-924c-3bc67c99b870\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.514501 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc385c54-02aa-4582-924c-3bc67c99b870-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dc385c54-02aa-4582-924c-3bc67c99b870\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.514758 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc385c54-02aa-4582-924c-3bc67c99b870-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dc385c54-02aa-4582-924c-3bc67c99b870\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.533934 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86ztl\" (UniqueName: \"kubernetes.io/projected/dc385c54-02aa-4582-924c-3bc67c99b870-kube-api-access-86ztl\") pod \"nova-cell1-novncproxy-0\" (UID: \"dc385c54-02aa-4582-924c-3bc67c99b870\") " pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.589399 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.611473 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-dns-svc\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.611554 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-config\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.611632 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwfcn\" (UniqueName: \"kubernetes.io/projected/c4432896-ed78-43ff-83fa-daf5ba668923-kube-api-access-hwfcn\") pod \"nova-api-0\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.611660 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkqwf\" (UniqueName: \"kubernetes.io/projected/dc14b10f-4cd2-4747-9137-39fb3f97ead4-kube-api-access-pkqwf\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.611716 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4432896-ed78-43ff-83fa-daf5ba668923-logs\") pod \"nova-api-0\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.611760 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4432896-ed78-43ff-83fa-daf5ba668923-config-data\") pod \"nova-api-0\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.611788 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.611848 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4432896-ed78-43ff-83fa-daf5ba668923-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.611880 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.612902 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" 
(UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-ovsdbserver-sb\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.613540 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-dns-svc\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.614263 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-config\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.615088 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4432896-ed78-43ff-83fa-daf5ba668923-logs\") pod \"nova-api-0\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.616290 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-ovsdbserver-nb\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.619925 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4432896-ed78-43ff-83fa-daf5ba668923-config-data\") pod \"nova-api-0\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.631663 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4432896-ed78-43ff-83fa-daf5ba668923-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.634379 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkqwf\" (UniqueName: \"kubernetes.io/projected/dc14b10f-4cd2-4747-9137-39fb3f97ead4-kube-api-access-pkqwf\") pod \"dnsmasq-dns-6c6f5885c9-r95pq\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.636004 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwfcn\" (UniqueName: \"kubernetes.io/projected/c4432896-ed78-43ff-83fa-daf5ba668923-kube-api-access-hwfcn\") pod \"nova-api-0\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.763250 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.779215 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 21 15:36:48 crc kubenswrapper[4774]: I1121 15:36:48.788532 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:48.881383 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-qpm97"] Nov 21 15:36:49 crc kubenswrapper[4774]: W1121 15:36:48.897102 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcfe5a2bf_1a38_4433_bd7b_b25e9df160d7.slice/crio-c1d2fcb75b493b5e3ca2e01460f5e77c3a5484a37dfcaf84b8e2fc0863ad5670 WatchSource:0}: Error finding container c1d2fcb75b493b5e3ca2e01460f5e77c3a5484a37dfcaf84b8e2fc0863ad5670: Status 404 returned error can't find the container with id c1d2fcb75b493b5e3ca2e01460f5e77c3a5484a37dfcaf84b8e2fc0863ad5670 Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:48.970105 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:36:49 crc kubenswrapper[4774]: W1121 15:36:48.997222 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd584d217_33a3_49fb_bc49_7a41563768fc.slice/crio-434fe72a5337ffdb089808e90451fa27dec4099f73878892501a9966581ccea5 WatchSource:0}: Error finding container 434fe72a5337ffdb089808e90451fa27dec4099f73878892501a9966581ccea5: Status 404 returned error can't find the container with id 434fe72a5337ffdb089808e90451fa27dec4099f73878892501a9966581ccea5 Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.011605 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-z9dv6"] Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.013449 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.016299 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.016506 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.020122 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-z9dv6"] Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.021075 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-config-data\") pod \"nova-cell1-conductor-db-sync-z9dv6\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.021141 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2tkk\" (UniqueName: \"kubernetes.io/projected/9911a5f9-8117-4f37-a08a-149593f09288-kube-api-access-m2tkk\") pod \"nova-cell1-conductor-db-sync-z9dv6\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.021182 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-scripts\") pod \"nova-cell1-conductor-db-sync-z9dv6\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.021209 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-z9dv6\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.120262 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d584d217-33a3-49fb-bc49-7a41563768fc","Type":"ContainerStarted","Data":"434fe72a5337ffdb089808e90451fa27dec4099f73878892501a9966581ccea5"} Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.122541 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2tkk\" (UniqueName: \"kubernetes.io/projected/9911a5f9-8117-4f37-a08a-149593f09288-kube-api-access-m2tkk\") pod \"nova-cell1-conductor-db-sync-z9dv6\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.122618 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-scripts\") pod \"nova-cell1-conductor-db-sync-z9dv6\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.122663 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-z9dv6\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.123388 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-qpm97" event={"ID":"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7","Type":"ContainerStarted","Data":"c1d2fcb75b493b5e3ca2e01460f5e77c3a5484a37dfcaf84b8e2fc0863ad5670"} Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.125288 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-config-data\") pod \"nova-cell1-conductor-db-sync-z9dv6\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.132086 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-scripts\") pod \"nova-cell1-conductor-db-sync-z9dv6\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.134139 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-config-data\") pod \"nova-cell1-conductor-db-sync-z9dv6\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.135601 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-z9dv6\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.140056 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.141192 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2tkk\" (UniqueName: \"kubernetes.io/projected/9911a5f9-8117-4f37-a08a-149593f09288-kube-api-access-m2tkk\") pod \"nova-cell1-conductor-db-sync-z9dv6\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: W1121 15:36:49.142242 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e00c2f7_6fd0_4422_9896_1e1764281b0c.slice/crio-82d5f1d82bb83202ef3195f4f2e933ce7ebede406c4602d1b00bc56c4e3639d9 WatchSource:0}: Error finding container 82d5f1d82bb83202ef3195f4f2e933ce7ebede406c4602d1b00bc56c4e3639d9: Status 404 returned error can't find the container with id 82d5f1d82bb83202ef3195f4f2e933ce7ebede406c4602d1b00bc56c4e3639d9 Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.357115 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.914473 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c6f5885c9-r95pq"] Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.917899 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 15:36:49 crc kubenswrapper[4774]: W1121 15:36:49.926233 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4432896_ed78_43ff_83fa_daf5ba668923.slice/crio-22b8cf88f844e48b94139402133a94a40d93059c982d16ac423734a0f49ad1de WatchSource:0}: Error finding container 22b8cf88f844e48b94139402133a94a40d93059c982d16ac423734a0f49ad1de: Status 404 returned error can't find the container with id 22b8cf88f844e48b94139402133a94a40d93059c982d16ac423734a0f49ad1de Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.927926 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-z9dv6"] Nov 21 15:36:49 crc kubenswrapper[4774]: W1121 15:36:49.929220 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9911a5f9_8117_4f37_a08a_149593f09288.slice/crio-206b6842ee00cd460303a47f5855ec39fd86b9b5f8a74a404a502a2ab37229de WatchSource:0}: Error finding container 206b6842ee00cd460303a47f5855ec39fd86b9b5f8a74a404a502a2ab37229de: Status 404 returned error can't find the container with id 206b6842ee00cd460303a47f5855ec39fd86b9b5f8a74a404a502a2ab37229de Nov 21 15:36:49 crc kubenswrapper[4774]: I1121 15:36:49.935594 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.141560 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e00c2f7-6fd0-4422-9896-1e1764281b0c","Type":"ContainerStarted","Data":"190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34"} Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.141972 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e00c2f7-6fd0-4422-9896-1e1764281b0c","Type":"ContainerStarted","Data":"b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d"} Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.141988 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e00c2f7-6fd0-4422-9896-1e1764281b0c","Type":"ContainerStarted","Data":"82d5f1d82bb83202ef3195f4f2e933ce7ebede406c4602d1b00bc56c4e3639d9"} Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.146329 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dc385c54-02aa-4582-924c-3bc67c99b870","Type":"ContainerStarted","Data":"bac642411104fa859b55c4addd7867d551605da45f3cd24e850f5f41b64bb167"} Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.161616 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-qpm97" event={"ID":"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7","Type":"ContainerStarted","Data":"014f44fa639be6bf4c672cad96a8b59b40e79b0559f7ff0c83508e29381b64b9"} Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.166130 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" 
event={"ID":"dc14b10f-4cd2-4747-9137-39fb3f97ead4","Type":"ContainerStarted","Data":"6c5fad3c154418c462ea98b1b7b15fd177f09c980cad2a3786c47d060f95f9ce"} Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.169509 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c4432896-ed78-43ff-83fa-daf5ba668923","Type":"ContainerStarted","Data":"22b8cf88f844e48b94139402133a94a40d93059c982d16ac423734a0f49ad1de"} Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.172752 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d584d217-33a3-49fb-bc49-7a41563768fc","Type":"ContainerStarted","Data":"031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f"} Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.179511 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-z9dv6" event={"ID":"9911a5f9-8117-4f37-a08a-149593f09288","Type":"ContainerStarted","Data":"206b6842ee00cd460303a47f5855ec39fd86b9b5f8a74a404a502a2ab37229de"} Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.293953 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.2938788150000002 podStartE2EDuration="2.293878815s" podCreationTimestamp="2025-11-21 15:36:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:36:50.275310676 +0000 UTC m=+5600.927509965" watchObservedRunningTime="2025-11-21 15:36:50.293878815 +0000 UTC m=+5600.946078094" Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.321454 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-z9dv6" podStartSLOduration=2.321431211 podStartE2EDuration="2.321431211s" podCreationTimestamp="2025-11-21 15:36:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:36:50.288171553 +0000 UTC m=+5600.940370812" watchObservedRunningTime="2025-11-21 15:36:50.321431211 +0000 UTC m=+5600.973630470" Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.339459 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-qpm97" podStartSLOduration=3.339437684 podStartE2EDuration="3.339437684s" podCreationTimestamp="2025-11-21 15:36:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:36:50.306815564 +0000 UTC m=+5600.959014823" watchObservedRunningTime="2025-11-21 15:36:50.339437684 +0000 UTC m=+5600.991636943" Nov 21 15:36:50 crc kubenswrapper[4774]: I1121 15:36:50.342243 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.342232624 podStartE2EDuration="2.342232624s" podCreationTimestamp="2025-11-21 15:36:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:36:50.32737207 +0000 UTC m=+5600.979571329" watchObservedRunningTime="2025-11-21 15:36:50.342232624 +0000 UTC m=+5600.994431883" Nov 21 15:36:51 crc kubenswrapper[4774]: I1121 15:36:51.191504 4774 generic.go:334] "Generic (PLEG): container finished" podID="dc14b10f-4cd2-4747-9137-39fb3f97ead4" 
containerID="c216711707970b6a58618e62fe7d83d2aac29efe8f1f83a791349a4921dfa20c" exitCode=0 Nov 21 15:36:51 crc kubenswrapper[4774]: I1121 15:36:51.191858 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" event={"ID":"dc14b10f-4cd2-4747-9137-39fb3f97ead4","Type":"ContainerDied","Data":"c216711707970b6a58618e62fe7d83d2aac29efe8f1f83a791349a4921dfa20c"} Nov 21 15:36:51 crc kubenswrapper[4774]: I1121 15:36:51.197795 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c4432896-ed78-43ff-83fa-daf5ba668923","Type":"ContainerStarted","Data":"791921d7c8aab8bc0c0f17059f78098b9f2c5c9ab7d1904ff426217701314016"} Nov 21 15:36:51 crc kubenswrapper[4774]: I1121 15:36:51.197886 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c4432896-ed78-43ff-83fa-daf5ba668923","Type":"ContainerStarted","Data":"acd533f82421eeea0661eaf57fea7b2c3f21cb770cfd0e9c38abba934144b93c"} Nov 21 15:36:51 crc kubenswrapper[4774]: I1121 15:36:51.216348 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-z9dv6" event={"ID":"9911a5f9-8117-4f37-a08a-149593f09288","Type":"ContainerStarted","Data":"b0d06f7e9eeba2e0a87ee1156fe2e759d6835c7b17a5b532de61ab99dce6d232"} Nov 21 15:36:51 crc kubenswrapper[4774]: I1121 15:36:51.237907 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dc385c54-02aa-4582-924c-3bc67c99b870","Type":"ContainerStarted","Data":"88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3"} Nov 21 15:36:51 crc kubenswrapper[4774]: I1121 15:36:51.254901 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.25488085 podStartE2EDuration="3.25488085s" podCreationTimestamp="2025-11-21 15:36:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:36:51.249924329 +0000 UTC m=+5601.902123598" watchObservedRunningTime="2025-11-21 15:36:51.25488085 +0000 UTC m=+5601.907080099" Nov 21 15:36:51 crc kubenswrapper[4774]: I1121 15:36:51.277300 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.277278509 podStartE2EDuration="3.277278509s" podCreationTimestamp="2025-11-21 15:36:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:36:51.265423521 +0000 UTC m=+5601.917622780" watchObservedRunningTime="2025-11-21 15:36:51.277278509 +0000 UTC m=+5601.929477768" Nov 21 15:36:52 crc kubenswrapper[4774]: I1121 15:36:52.250488 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" event={"ID":"dc14b10f-4cd2-4747-9137-39fb3f97ead4","Type":"ContainerStarted","Data":"66123e6c311fdae3c77a271d72af3dab55c4fdc47c70c70737773017ff1dfcf7"} Nov 21 15:36:52 crc kubenswrapper[4774]: I1121 15:36:52.250910 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:52 crc kubenswrapper[4774]: I1121 15:36:52.281207 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" podStartSLOduration=4.281189596 podStartE2EDuration="4.281189596s" podCreationTimestamp="2025-11-21 15:36:48 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:36:52.270656416 +0000 UTC m=+5602.922855685" watchObservedRunningTime="2025-11-21 15:36:52.281189596 +0000 UTC m=+5602.933388855" Nov 21 15:36:53 crc kubenswrapper[4774]: I1121 15:36:53.270512 4774 generic.go:334] "Generic (PLEG): container finished" podID="9911a5f9-8117-4f37-a08a-149593f09288" containerID="b0d06f7e9eeba2e0a87ee1156fe2e759d6835c7b17a5b532de61ab99dce6d232" exitCode=0 Nov 21 15:36:53 crc kubenswrapper[4774]: I1121 15:36:53.270691 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-z9dv6" event={"ID":"9911a5f9-8117-4f37-a08a-149593f09288","Type":"ContainerDied","Data":"b0d06f7e9eeba2e0a87ee1156fe2e759d6835c7b17a5b532de61ab99dce6d232"} Nov 21 15:36:53 crc kubenswrapper[4774]: I1121 15:36:53.388131 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 21 15:36:53 crc kubenswrapper[4774]: I1121 15:36:53.590125 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 15:36:53 crc kubenswrapper[4774]: I1121 15:36:53.590605 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 15:36:53 crc kubenswrapper[4774]: I1121 15:36:53.765741 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.285607 4774 generic.go:334] "Generic (PLEG): container finished" podID="cfe5a2bf-1a38-4433-bd7b-b25e9df160d7" containerID="014f44fa639be6bf4c672cad96a8b59b40e79b0559f7ff0c83508e29381b64b9" exitCode=0 Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.285679 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-qpm97" event={"ID":"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7","Type":"ContainerDied","Data":"014f44fa639be6bf4c672cad96a8b59b40e79b0559f7ff0c83508e29381b64b9"} Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.667529 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.859152 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-combined-ca-bundle\") pod \"9911a5f9-8117-4f37-a08a-149593f09288\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.859407 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2tkk\" (UniqueName: \"kubernetes.io/projected/9911a5f9-8117-4f37-a08a-149593f09288-kube-api-access-m2tkk\") pod \"9911a5f9-8117-4f37-a08a-149593f09288\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.859438 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-scripts\") pod \"9911a5f9-8117-4f37-a08a-149593f09288\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.859484 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-config-data\") pod \"9911a5f9-8117-4f37-a08a-149593f09288\" (UID: \"9911a5f9-8117-4f37-a08a-149593f09288\") " Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.864914 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-scripts" (OuterVolumeSpecName: "scripts") pod "9911a5f9-8117-4f37-a08a-149593f09288" (UID: "9911a5f9-8117-4f37-a08a-149593f09288"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.867042 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9911a5f9-8117-4f37-a08a-149593f09288-kube-api-access-m2tkk" (OuterVolumeSpecName: "kube-api-access-m2tkk") pod "9911a5f9-8117-4f37-a08a-149593f09288" (UID: "9911a5f9-8117-4f37-a08a-149593f09288"). InnerVolumeSpecName "kube-api-access-m2tkk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.885363 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-config-data" (OuterVolumeSpecName: "config-data") pod "9911a5f9-8117-4f37-a08a-149593f09288" (UID: "9911a5f9-8117-4f37-a08a-149593f09288"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.894234 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9911a5f9-8117-4f37-a08a-149593f09288" (UID: "9911a5f9-8117-4f37-a08a-149593f09288"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.962138 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.962192 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.962202 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2tkk\" (UniqueName: \"kubernetes.io/projected/9911a5f9-8117-4f37-a08a-149593f09288-kube-api-access-m2tkk\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:54 crc kubenswrapper[4774]: I1121 15:36:54.962212 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9911a5f9-8117-4f37-a08a-149593f09288-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.298185 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-z9dv6" event={"ID":"9911a5f9-8117-4f37-a08a-149593f09288","Type":"ContainerDied","Data":"206b6842ee00cd460303a47f5855ec39fd86b9b5f8a74a404a502a2ab37229de"} Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.298245 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="206b6842ee00cd460303a47f5855ec39fd86b9b5f8a74a404a502a2ab37229de" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.298201 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-z9dv6" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.377778 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 15:36:55 crc kubenswrapper[4774]: E1121 15:36:55.378260 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9911a5f9-8117-4f37-a08a-149593f09288" containerName="nova-cell1-conductor-db-sync" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.378284 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9911a5f9-8117-4f37-a08a-149593f09288" containerName="nova-cell1-conductor-db-sync" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.378523 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9911a5f9-8117-4f37-a08a-149593f09288" containerName="nova-cell1-conductor-db-sync" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.379453 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.390465 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.395078 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.576804 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvmhf\" (UniqueName: \"kubernetes.io/projected/996312d6-6bfd-47a2-83a4-d43364658f94-kube-api-access-rvmhf\") pod \"nova-cell1-conductor-0\" (UID: \"996312d6-6bfd-47a2-83a4-d43364658f94\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.577193 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/996312d6-6bfd-47a2-83a4-d43364658f94-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"996312d6-6bfd-47a2-83a4-d43364658f94\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.577293 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/996312d6-6bfd-47a2-83a4-d43364658f94-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"996312d6-6bfd-47a2-83a4-d43364658f94\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.679936 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/996312d6-6bfd-47a2-83a4-d43364658f94-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"996312d6-6bfd-47a2-83a4-d43364658f94\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.680057 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvmhf\" (UniqueName: \"kubernetes.io/projected/996312d6-6bfd-47a2-83a4-d43364658f94-kube-api-access-rvmhf\") pod \"nova-cell1-conductor-0\" (UID: \"996312d6-6bfd-47a2-83a4-d43364658f94\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.680111 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/996312d6-6bfd-47a2-83a4-d43364658f94-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"996312d6-6bfd-47a2-83a4-d43364658f94\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.685630 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/996312d6-6bfd-47a2-83a4-d43364658f94-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"996312d6-6bfd-47a2-83a4-d43364658f94\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.686361 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/996312d6-6bfd-47a2-83a4-d43364658f94-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"996312d6-6bfd-47a2-83a4-d43364658f94\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.700946 4774 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvmhf\" (UniqueName: \"kubernetes.io/projected/996312d6-6bfd-47a2-83a4-d43364658f94-kube-api-access-rvmhf\") pod \"nova-cell1-conductor-0\" (UID: \"996312d6-6bfd-47a2-83a4-d43364658f94\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.793429 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.989908 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-config-data\") pod \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.989958 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-combined-ca-bundle\") pod \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.990024 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-scripts\") pod \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.990046 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dr2wc\" (UniqueName: \"kubernetes.io/projected/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-kube-api-access-dr2wc\") pod \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\" (UID: \"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7\") " Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.994704 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-kube-api-access-dr2wc" (OuterVolumeSpecName: "kube-api-access-dr2wc") pod "cfe5a2bf-1a38-4433-bd7b-b25e9df160d7" (UID: "cfe5a2bf-1a38-4433-bd7b-b25e9df160d7"). InnerVolumeSpecName "kube-api-access-dr2wc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.995241 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-scripts" (OuterVolumeSpecName: "scripts") pod "cfe5a2bf-1a38-4433-bd7b-b25e9df160d7" (UID: "cfe5a2bf-1a38-4433-bd7b-b25e9df160d7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:55 crc kubenswrapper[4774]: I1121 15:36:55.995810 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.022730 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-config-data" (OuterVolumeSpecName: "config-data") pod "cfe5a2bf-1a38-4433-bd7b-b25e9df160d7" (UID: "cfe5a2bf-1a38-4433-bd7b-b25e9df160d7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.023791 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cfe5a2bf-1a38-4433-bd7b-b25e9df160d7" (UID: "cfe5a2bf-1a38-4433-bd7b-b25e9df160d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.092684 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.092722 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.092737 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.092748 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dr2wc\" (UniqueName: \"kubernetes.io/projected/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7-kube-api-access-dr2wc\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.308575 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-qpm97" event={"ID":"cfe5a2bf-1a38-4433-bd7b-b25e9df160d7","Type":"ContainerDied","Data":"c1d2fcb75b493b5e3ca2e01460f5e77c3a5484a37dfcaf84b8e2fc0863ad5670"} Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.308894 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1d2fcb75b493b5e3ca2e01460f5e77c3a5484a37dfcaf84b8e2fc0863ad5670" Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.308671 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-qpm97" Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.432203 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.590689 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.593694 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c4432896-ed78-43ff-83fa-daf5ba668923" containerName="nova-api-log" containerID="cri-o://791921d7c8aab8bc0c0f17059f78098b9f2c5c9ab7d1904ff426217701314016" gracePeriod=30 Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.593797 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c4432896-ed78-43ff-83fa-daf5ba668923" containerName="nova-api-api" containerID="cri-o://acd533f82421eeea0661eaf57fea7b2c3f21cb770cfd0e9c38abba934144b93c" gracePeriod=30 Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.604627 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.604972 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d584d217-33a3-49fb-bc49-7a41563768fc" containerName="nova-scheduler-scheduler" containerID="cri-o://031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f" gracePeriod=30 Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.631532 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.631794 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6e00c2f7-6fd0-4422-9896-1e1764281b0c" containerName="nova-metadata-log" containerID="cri-o://b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d" gracePeriod=30 Nov 21 15:36:56 crc kubenswrapper[4774]: I1121 15:36:56.632327 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6e00c2f7-6fd0-4422-9896-1e1764281b0c" containerName="nova-metadata-metadata" containerID="cri-o://190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34" gracePeriod=30 Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.127927 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.316999 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e00c2f7-6fd0-4422-9896-1e1764281b0c-config-data\") pod \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.317056 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e00c2f7-6fd0-4422-9896-1e1764281b0c-combined-ca-bundle\") pod \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.317209 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e00c2f7-6fd0-4422-9896-1e1764281b0c-logs\") pod \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.317251 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gg8b\" (UniqueName: \"kubernetes.io/projected/6e00c2f7-6fd0-4422-9896-1e1764281b0c-kube-api-access-5gg8b\") pod \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\" (UID: \"6e00c2f7-6fd0-4422-9896-1e1764281b0c\") " Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.320507 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e00c2f7-6fd0-4422-9896-1e1764281b0c-logs" (OuterVolumeSpecName: "logs") pod "6e00c2f7-6fd0-4422-9896-1e1764281b0c" (UID: "6e00c2f7-6fd0-4422-9896-1e1764281b0c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.322578 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e00c2f7-6fd0-4422-9896-1e1764281b0c-kube-api-access-5gg8b" (OuterVolumeSpecName: "kube-api-access-5gg8b") pod "6e00c2f7-6fd0-4422-9896-1e1764281b0c" (UID: "6e00c2f7-6fd0-4422-9896-1e1764281b0c"). InnerVolumeSpecName "kube-api-access-5gg8b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.325245 4774 generic.go:334] "Generic (PLEG): container finished" podID="c4432896-ed78-43ff-83fa-daf5ba668923" containerID="acd533f82421eeea0661eaf57fea7b2c3f21cb770cfd0e9c38abba934144b93c" exitCode=0 Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.325275 4774 generic.go:334] "Generic (PLEG): container finished" podID="c4432896-ed78-43ff-83fa-daf5ba668923" containerID="791921d7c8aab8bc0c0f17059f78098b9f2c5c9ab7d1904ff426217701314016" exitCode=143 Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.325318 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c4432896-ed78-43ff-83fa-daf5ba668923","Type":"ContainerDied","Data":"acd533f82421eeea0661eaf57fea7b2c3f21cb770cfd0e9c38abba934144b93c"} Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.325469 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c4432896-ed78-43ff-83fa-daf5ba668923","Type":"ContainerDied","Data":"791921d7c8aab8bc0c0f17059f78098b9f2c5c9ab7d1904ff426217701314016"} Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.326953 4774 generic.go:334] "Generic (PLEG): container finished" podID="6e00c2f7-6fd0-4422-9896-1e1764281b0c" containerID="190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34" exitCode=0 Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.326995 4774 generic.go:334] "Generic (PLEG): container finished" podID="6e00c2f7-6fd0-4422-9896-1e1764281b0c" containerID="b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d" exitCode=143 Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.327052 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e00c2f7-6fd0-4422-9896-1e1764281b0c","Type":"ContainerDied","Data":"190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34"} Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.327070 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e00c2f7-6fd0-4422-9896-1e1764281b0c","Type":"ContainerDied","Data":"b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d"} Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.327080 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6e00c2f7-6fd0-4422-9896-1e1764281b0c","Type":"ContainerDied","Data":"82d5f1d82bb83202ef3195f4f2e933ce7ebede406c4602d1b00bc56c4e3639d9"} Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.327094 4774 scope.go:117] "RemoveContainer" containerID="190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.327426 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.331956 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"996312d6-6bfd-47a2-83a4-d43364658f94","Type":"ContainerStarted","Data":"d57481469ee63be2532071a8c3856420f643f85b1c91fe469f6acf85573d98c5"} Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.332007 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"996312d6-6bfd-47a2-83a4-d43364658f94","Type":"ContainerStarted","Data":"9f224c21fe26268d0f680ab7a1b3da95ff749c4d9060ac9c47b92e32f48cbc7a"} Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.332121 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.352457 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e00c2f7-6fd0-4422-9896-1e1764281b0c-config-data" (OuterVolumeSpecName: "config-data") pod "6e00c2f7-6fd0-4422-9896-1e1764281b0c" (UID: "6e00c2f7-6fd0-4422-9896-1e1764281b0c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.359341 4774 scope.go:117] "RemoveContainer" containerID="b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.368738 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.368718473 podStartE2EDuration="2.368718473s" podCreationTimestamp="2025-11-21 15:36:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:36:57.353011145 +0000 UTC m=+5608.005210404" watchObservedRunningTime="2025-11-21 15:36:57.368718473 +0000 UTC m=+5608.020917732" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.373015 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e00c2f7-6fd0-4422-9896-1e1764281b0c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e00c2f7-6fd0-4422-9896-1e1764281b0c" (UID: "6e00c2f7-6fd0-4422-9896-1e1764281b0c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.381300 4774 scope.go:117] "RemoveContainer" containerID="190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34" Nov 21 15:36:57 crc kubenswrapper[4774]: E1121 15:36:57.381613 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34\": container with ID starting with 190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34 not found: ID does not exist" containerID="190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.381655 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34"} err="failed to get container status \"190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34\": rpc error: code = NotFound desc = could not find container \"190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34\": container with ID starting with 190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34 not found: ID does not exist" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.381682 4774 scope.go:117] "RemoveContainer" containerID="b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d" Nov 21 15:36:57 crc kubenswrapper[4774]: E1121 15:36:57.382223 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d\": container with ID starting with b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d not found: ID does not exist" containerID="b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.382250 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d"} err="failed to get container status \"b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d\": rpc error: code = NotFound desc = could not find container \"b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d\": container with ID starting with b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d not found: ID does not exist" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.382268 4774 scope.go:117] "RemoveContainer" containerID="190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.382529 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34"} err="failed to get container status \"190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34\": rpc error: code = NotFound desc = could not find container \"190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34\": container with ID starting with 190544ada4a63c15abdb32f79cce3fd62487e1d080b9fa4fa8fa1591dd665d34 not found: ID does not exist" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.382555 4774 scope.go:117] "RemoveContainer" containerID="b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.382806 4774 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d"} err="failed to get container status \"b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d\": rpc error: code = NotFound desc = could not find container \"b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d\": container with ID starting with b3a920cb711a725cce3fcc436bd47b734304bd9eefa417678e66c6db9a8f401d not found: ID does not exist" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.420178 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e00c2f7-6fd0-4422-9896-1e1764281b0c-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.420221 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e00c2f7-6fd0-4422-9896-1e1764281b0c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.420235 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e00c2f7-6fd0-4422-9896-1e1764281b0c-logs\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.420248 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gg8b\" (UniqueName: \"kubernetes.io/projected/6e00c2f7-6fd0-4422-9896-1e1764281b0c-kube-api-access-5gg8b\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.497136 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.622545 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4432896-ed78-43ff-83fa-daf5ba668923-logs\") pod \"c4432896-ed78-43ff-83fa-daf5ba668923\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.622660 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwfcn\" (UniqueName: \"kubernetes.io/projected/c4432896-ed78-43ff-83fa-daf5ba668923-kube-api-access-hwfcn\") pod \"c4432896-ed78-43ff-83fa-daf5ba668923\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.622690 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4432896-ed78-43ff-83fa-daf5ba668923-combined-ca-bundle\") pod \"c4432896-ed78-43ff-83fa-daf5ba668923\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.622812 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4432896-ed78-43ff-83fa-daf5ba668923-config-data\") pod \"c4432896-ed78-43ff-83fa-daf5ba668923\" (UID: \"c4432896-ed78-43ff-83fa-daf5ba668923\") " Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.622951 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4432896-ed78-43ff-83fa-daf5ba668923-logs" (OuterVolumeSpecName: "logs") pod "c4432896-ed78-43ff-83fa-daf5ba668923" (UID: "c4432896-ed78-43ff-83fa-daf5ba668923"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.623521 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4432896-ed78-43ff-83fa-daf5ba668923-logs\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.626184 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4432896-ed78-43ff-83fa-daf5ba668923-kube-api-access-hwfcn" (OuterVolumeSpecName: "kube-api-access-hwfcn") pod "c4432896-ed78-43ff-83fa-daf5ba668923" (UID: "c4432896-ed78-43ff-83fa-daf5ba668923"). InnerVolumeSpecName "kube-api-access-hwfcn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.644103 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4432896-ed78-43ff-83fa-daf5ba668923-config-data" (OuterVolumeSpecName: "config-data") pod "c4432896-ed78-43ff-83fa-daf5ba668923" (UID: "c4432896-ed78-43ff-83fa-daf5ba668923"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.647315 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4432896-ed78-43ff-83fa-daf5ba668923-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4432896-ed78-43ff-83fa-daf5ba668923" (UID: "c4432896-ed78-43ff-83fa-daf5ba668923"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.717838 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.724998 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4432896-ed78-43ff-83fa-daf5ba668923-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.725046 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwfcn\" (UniqueName: \"kubernetes.io/projected/c4432896-ed78-43ff-83fa-daf5ba668923-kube-api-access-hwfcn\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.725058 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4432896-ed78-43ff-83fa-daf5ba668923-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.725559 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.747031 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:36:57 crc kubenswrapper[4774]: E1121 15:36:57.747558 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4432896-ed78-43ff-83fa-daf5ba668923" containerName="nova-api-api" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.747579 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4432896-ed78-43ff-83fa-daf5ba668923" containerName="nova-api-api" Nov 21 15:36:57 crc kubenswrapper[4774]: E1121 15:36:57.747590 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfe5a2bf-1a38-4433-bd7b-b25e9df160d7" containerName="nova-manage" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 
15:36:57.747596 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfe5a2bf-1a38-4433-bd7b-b25e9df160d7" containerName="nova-manage" Nov 21 15:36:57 crc kubenswrapper[4774]: E1121 15:36:57.747607 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e00c2f7-6fd0-4422-9896-1e1764281b0c" containerName="nova-metadata-log" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.747613 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e00c2f7-6fd0-4422-9896-1e1764281b0c" containerName="nova-metadata-log" Nov 21 15:36:57 crc kubenswrapper[4774]: E1121 15:36:57.747626 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4432896-ed78-43ff-83fa-daf5ba668923" containerName="nova-api-log" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.747631 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4432896-ed78-43ff-83fa-daf5ba668923" containerName="nova-api-log" Nov 21 15:36:57 crc kubenswrapper[4774]: E1121 15:36:57.747643 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e00c2f7-6fd0-4422-9896-1e1764281b0c" containerName="nova-metadata-metadata" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.747649 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e00c2f7-6fd0-4422-9896-1e1764281b0c" containerName="nova-metadata-metadata" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.747853 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4432896-ed78-43ff-83fa-daf5ba668923" containerName="nova-api-log" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.747873 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e00c2f7-6fd0-4422-9896-1e1764281b0c" containerName="nova-metadata-log" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.747885 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfe5a2bf-1a38-4433-bd7b-b25e9df160d7" containerName="nova-manage" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.747895 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4432896-ed78-43ff-83fa-daf5ba668923" containerName="nova-api-api" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.747901 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e00c2f7-6fd0-4422-9896-1e1764281b0c" containerName="nova-metadata-metadata" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.750067 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.752423 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.759510 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.927854 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-config-data\") pod \"nova-metadata-0\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " pod="openstack/nova-metadata-0" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.927899 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2c7dg\" (UniqueName: \"kubernetes.io/projected/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-kube-api-access-2c7dg\") pod \"nova-metadata-0\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " pod="openstack/nova-metadata-0" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.928129 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-logs\") pod \"nova-metadata-0\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " pod="openstack/nova-metadata-0" Nov 21 15:36:57 crc kubenswrapper[4774]: I1121 15:36:57.928177 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " pod="openstack/nova-metadata-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.029914 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " pod="openstack/nova-metadata-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.031102 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-config-data\") pod \"nova-metadata-0\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " pod="openstack/nova-metadata-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.031199 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2c7dg\" (UniqueName: \"kubernetes.io/projected/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-kube-api-access-2c7dg\") pod \"nova-metadata-0\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " pod="openstack/nova-metadata-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.031408 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-logs\") pod \"nova-metadata-0\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " pod="openstack/nova-metadata-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.032244 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-logs\") pod \"nova-metadata-0\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " pod="openstack/nova-metadata-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.035608 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-config-data\") pod \"nova-metadata-0\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " pod="openstack/nova-metadata-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.035973 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " pod="openstack/nova-metadata-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.047811 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2c7dg\" (UniqueName: \"kubernetes.io/projected/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-kube-api-access-2c7dg\") pod \"nova-metadata-0\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " pod="openstack/nova-metadata-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.083441 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.118387 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e00c2f7-6fd0-4422-9896-1e1764281b0c" path="/var/lib/kubelet/pods/6e00c2f7-6fd0-4422-9896-1e1764281b0c/volumes" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.341919 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c4432896-ed78-43ff-83fa-daf5ba668923","Type":"ContainerDied","Data":"22b8cf88f844e48b94139402133a94a40d93059c982d16ac423734a0f49ad1de"} Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.342254 4774 scope.go:117] "RemoveContainer" containerID="acd533f82421eeea0661eaf57fea7b2c3f21cb770cfd0e9c38abba934144b93c" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.342144 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.366164 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.377429 4774 scope.go:117] "RemoveContainer" containerID="791921d7c8aab8bc0c0f17059f78098b9f2c5c9ab7d1904ff426217701314016" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.382599 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.391103 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.394648 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.398099 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.407303 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.506192 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:36:58 crc kubenswrapper[4774]: W1121 15:36:58.506733 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddcc3fed4_8bcf_49bf_8d78_68f038d20e9d.slice/crio-06a260d1fc3f38dece8f23d7987624e148f58c59f2a182d0072fcfafe253ab67 WatchSource:0}: Error finding container 06a260d1fc3f38dece8f23d7987624e148f58c59f2a182d0072fcfafe253ab67: Status 404 returned error can't find the container with id 06a260d1fc3f38dece8f23d7987624e148f58c59f2a182d0072fcfafe253ab67 Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.539175 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8npv\" (UniqueName: \"kubernetes.io/projected/4496cb1d-522f-46e5-aedb-f1491b5e938c-kube-api-access-c8npv\") pod \"nova-api-0\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.539522 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4496cb1d-522f-46e5-aedb-f1491b5e938c-logs\") pod \"nova-api-0\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.539603 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4496cb1d-522f-46e5-aedb-f1491b5e938c-config-data\") pod \"nova-api-0\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.539797 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4496cb1d-522f-46e5-aedb-f1491b5e938c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.641861 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4496cb1d-522f-46e5-aedb-f1491b5e938c-config-data\") pod \"nova-api-0\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.641978 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4496cb1d-522f-46e5-aedb-f1491b5e938c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.642091 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8npv\" (UniqueName: \"kubernetes.io/projected/4496cb1d-522f-46e5-aedb-f1491b5e938c-kube-api-access-c8npv\") pod \"nova-api-0\" (UID: 
\"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.642113 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4496cb1d-522f-46e5-aedb-f1491b5e938c-logs\") pod \"nova-api-0\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.642712 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4496cb1d-522f-46e5-aedb-f1491b5e938c-logs\") pod \"nova-api-0\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.647154 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4496cb1d-522f-46e5-aedb-f1491b5e938c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.650700 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4496cb1d-522f-46e5-aedb-f1491b5e938c-config-data\") pod \"nova-api-0\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.662888 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8npv\" (UniqueName: \"kubernetes.io/projected/4496cb1d-522f-46e5-aedb-f1491b5e938c-kube-api-access-c8npv\") pod \"nova-api-0\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.722368 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.765469 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.775618 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.795598 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.877874 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-568f54959f-kwmrs"] Nov 21 15:36:58 crc kubenswrapper[4774]: I1121 15:36:58.878135 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" podUID="a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" containerName="dnsmasq-dns" containerID="cri-o://c2ae4e5c91610d5cd557be03b5e25f0e7f52ca8b744ccc6983964f1d334318f4" gracePeriod=10 Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.147330 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.356796 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4496cb1d-522f-46e5-aedb-f1491b5e938c","Type":"ContainerStarted","Data":"0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a"} Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.356865 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4496cb1d-522f-46e5-aedb-f1491b5e938c","Type":"ContainerStarted","Data":"ab15098edebb27d88d75eede97cea4369378c90c3e52f1539ceb1315f216ecd6"} Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.361763 4774 generic.go:334] "Generic (PLEG): container finished" podID="a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" containerID="c2ae4e5c91610d5cd557be03b5e25f0e7f52ca8b744ccc6983964f1d334318f4" exitCode=0 Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.361866 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" event={"ID":"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3","Type":"ContainerDied","Data":"c2ae4e5c91610d5cd557be03b5e25f0e7f52ca8b744ccc6983964f1d334318f4"} Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.364443 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d","Type":"ContainerStarted","Data":"460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943"} Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.364480 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d","Type":"ContainerStarted","Data":"5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b"} Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.364490 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d","Type":"ContainerStarted","Data":"06a260d1fc3f38dece8f23d7987624e148f58c59f2a182d0072fcfafe253ab67"} Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.373502 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.376890 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.395022 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.395003394 podStartE2EDuration="2.395003394s" podCreationTimestamp="2025-11-21 15:36:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:36:59.381991724 +0000 UTC m=+5610.034190983" watchObservedRunningTime="2025-11-21 15:36:59.395003394 +0000 UTC m=+5610.047202653" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.496245 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9vfq\" (UniqueName: \"kubernetes.io/projected/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-kube-api-access-r9vfq\") pod \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.496562 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-ovsdbserver-nb\") pod \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.496611 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-dns-svc\") pod \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.496655 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-ovsdbserver-sb\") pod \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.496693 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-config\") pod \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\" (UID: \"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3\") " Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.504115 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-kube-api-access-r9vfq" (OuterVolumeSpecName: "kube-api-access-r9vfq") pod "a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" (UID: "a74c1583-05f2-4bb9-b1bc-51f1d0e304f3"). InnerVolumeSpecName "kube-api-access-r9vfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.553313 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" (UID: "a74c1583-05f2-4bb9-b1bc-51f1d0e304f3"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.555296 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" (UID: "a74c1583-05f2-4bb9-b1bc-51f1d0e304f3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.558914 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-config" (OuterVolumeSpecName: "config") pod "a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" (UID: "a74c1583-05f2-4bb9-b1bc-51f1d0e304f3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.561085 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" (UID: "a74c1583-05f2-4bb9-b1bc-51f1d0e304f3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.598331 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9vfq\" (UniqueName: \"kubernetes.io/projected/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-kube-api-access-r9vfq\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.598366 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.598376 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.598384 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.598393 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.600417 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:36:59 crc kubenswrapper[4774]: I1121 15:36:59.600462 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:37:00 crc kubenswrapper[4774]: I1121 15:37:00.104905 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c4432896-ed78-43ff-83fa-daf5ba668923" path="/var/lib/kubelet/pods/c4432896-ed78-43ff-83fa-daf5ba668923/volumes" Nov 21 15:37:00 crc kubenswrapper[4774]: E1121 15:37:00.194421 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda74c1583_05f2_4bb9_b1bc_51f1d0e304f3.slice\": RecentStats: unable to find data in memory cache]" Nov 21 15:37:00 crc kubenswrapper[4774]: I1121 15:37:00.379661 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" event={"ID":"a74c1583-05f2-4bb9-b1bc-51f1d0e304f3","Type":"ContainerDied","Data":"66e9c01c298d6e3b574e51c3d4c396364c9902cbd336c89786c880525e85a2b8"} Nov 21 15:37:00 crc kubenswrapper[4774]: I1121 15:37:00.379980 4774 scope.go:117] "RemoveContainer" containerID="c2ae4e5c91610d5cd557be03b5e25f0e7f52ca8b744ccc6983964f1d334318f4" Nov 21 15:37:00 crc kubenswrapper[4774]: I1121 15:37:00.380041 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-568f54959f-kwmrs" Nov 21 15:37:00 crc kubenswrapper[4774]: I1121 15:37:00.382437 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4496cb1d-522f-46e5-aedb-f1491b5e938c","Type":"ContainerStarted","Data":"c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2"} Nov 21 15:37:00 crc kubenswrapper[4774]: I1121 15:37:00.399523 4774 scope.go:117] "RemoveContainer" containerID="fe46e7075cbfdc054f99fb90b456dd662e1863cf7ffcdbf0beb819d6d645cb8b" Nov 21 15:37:00 crc kubenswrapper[4774]: I1121 15:37:00.416046 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.41602401 podStartE2EDuration="2.41602401s" podCreationTimestamp="2025-11-21 15:36:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:37:00.402722511 +0000 UTC m=+5611.054921790" watchObservedRunningTime="2025-11-21 15:37:00.41602401 +0000 UTC m=+5611.068223269" Nov 21 15:37:00 crc kubenswrapper[4774]: I1121 15:37:00.427560 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-568f54959f-kwmrs"] Nov 21 15:37:00 crc kubenswrapper[4774]: I1121 15:37:00.441800 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-568f54959f-kwmrs"] Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.184331 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.327352 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d584d217-33a3-49fb-bc49-7a41563768fc-config-data\") pod \"d584d217-33a3-49fb-bc49-7a41563768fc\" (UID: \"d584d217-33a3-49fb-bc49-7a41563768fc\") " Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.327536 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d584d217-33a3-49fb-bc49-7a41563768fc-combined-ca-bundle\") pod \"d584d217-33a3-49fb-bc49-7a41563768fc\" (UID: \"d584d217-33a3-49fb-bc49-7a41563768fc\") " Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.327629 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcf6s\" (UniqueName: \"kubernetes.io/projected/d584d217-33a3-49fb-bc49-7a41563768fc-kube-api-access-fcf6s\") pod \"d584d217-33a3-49fb-bc49-7a41563768fc\" (UID: \"d584d217-33a3-49fb-bc49-7a41563768fc\") " Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.335674 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d584d217-33a3-49fb-bc49-7a41563768fc-kube-api-access-fcf6s" (OuterVolumeSpecName: "kube-api-access-fcf6s") pod "d584d217-33a3-49fb-bc49-7a41563768fc" (UID: "d584d217-33a3-49fb-bc49-7a41563768fc"). InnerVolumeSpecName "kube-api-access-fcf6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.355882 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d584d217-33a3-49fb-bc49-7a41563768fc-config-data" (OuterVolumeSpecName: "config-data") pod "d584d217-33a3-49fb-bc49-7a41563768fc" (UID: "d584d217-33a3-49fb-bc49-7a41563768fc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.357005 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d584d217-33a3-49fb-bc49-7a41563768fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d584d217-33a3-49fb-bc49-7a41563768fc" (UID: "d584d217-33a3-49fb-bc49-7a41563768fc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.393598 4774 generic.go:334] "Generic (PLEG): container finished" podID="d584d217-33a3-49fb-bc49-7a41563768fc" containerID="031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f" exitCode=0 Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.394572 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d584d217-33a3-49fb-bc49-7a41563768fc","Type":"ContainerDied","Data":"031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f"} Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.396041 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d584d217-33a3-49fb-bc49-7a41563768fc","Type":"ContainerDied","Data":"434fe72a5337ffdb089808e90451fa27dec4099f73878892501a9966581ccea5"} Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.394636 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.396265 4774 scope.go:117] "RemoveContainer" containerID="031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.432411 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcf6s\" (UniqueName: \"kubernetes.io/projected/d584d217-33a3-49fb-bc49-7a41563768fc-kube-api-access-fcf6s\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.432786 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d584d217-33a3-49fb-bc49-7a41563768fc-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.432797 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d584d217-33a3-49fb-bc49-7a41563768fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.437116 4774 scope.go:117] "RemoveContainer" containerID="031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f" Nov 21 15:37:01 crc kubenswrapper[4774]: E1121 15:37:01.438489 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f\": container with ID starting with 031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f not found: ID does not exist" containerID="031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.438546 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f"} err="failed to get container status \"031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f\": rpc error: code = NotFound desc = could not find container \"031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f\": container with ID starting with 031f05187a788d67b0577e3bfe071dadfb136f96210aced909a25ba296de051f not found: ID does not exist" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.449145 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.458191 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.466238 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:37:01 crc kubenswrapper[4774]: E1121 15:37:01.466778 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" containerName="dnsmasq-dns" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.466812 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" containerName="dnsmasq-dns" Nov 21 15:37:01 crc kubenswrapper[4774]: E1121 15:37:01.466859 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" containerName="init" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.466868 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" containerName="init" Nov 21 15:37:01 crc kubenswrapper[4774]: E1121 
15:37:01.466896 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d584d217-33a3-49fb-bc49-7a41563768fc" containerName="nova-scheduler-scheduler" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.466906 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d584d217-33a3-49fb-bc49-7a41563768fc" containerName="nova-scheduler-scheduler" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.467123 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" containerName="dnsmasq-dns" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.467151 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="d584d217-33a3-49fb-bc49-7a41563768fc" containerName="nova-scheduler-scheduler" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.467992 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.470096 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.472966 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.637400 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c4d42b3-28e8-476c-9639-b1c7010b3909-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9c4d42b3-28e8-476c-9639-b1c7010b3909\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.637509 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwvz5\" (UniqueName: \"kubernetes.io/projected/9c4d42b3-28e8-476c-9639-b1c7010b3909-kube-api-access-bwvz5\") pod \"nova-scheduler-0\" (UID: \"9c4d42b3-28e8-476c-9639-b1c7010b3909\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.637554 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c4d42b3-28e8-476c-9639-b1c7010b3909-config-data\") pod \"nova-scheduler-0\" (UID: \"9c4d42b3-28e8-476c-9639-b1c7010b3909\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.739952 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwvz5\" (UniqueName: \"kubernetes.io/projected/9c4d42b3-28e8-476c-9639-b1c7010b3909-kube-api-access-bwvz5\") pod \"nova-scheduler-0\" (UID: \"9c4d42b3-28e8-476c-9639-b1c7010b3909\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.740025 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c4d42b3-28e8-476c-9639-b1c7010b3909-config-data\") pod \"nova-scheduler-0\" (UID: \"9c4d42b3-28e8-476c-9639-b1c7010b3909\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.740142 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c4d42b3-28e8-476c-9639-b1c7010b3909-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9c4d42b3-28e8-476c-9639-b1c7010b3909\") " pod="openstack/nova-scheduler-0" Nov 21 
15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.743717 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c4d42b3-28e8-476c-9639-b1c7010b3909-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9c4d42b3-28e8-476c-9639-b1c7010b3909\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.744007 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c4d42b3-28e8-476c-9639-b1c7010b3909-config-data\") pod \"nova-scheduler-0\" (UID: \"9c4d42b3-28e8-476c-9639-b1c7010b3909\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.756913 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwvz5\" (UniqueName: \"kubernetes.io/projected/9c4d42b3-28e8-476c-9639-b1c7010b3909-kube-api-access-bwvz5\") pod \"nova-scheduler-0\" (UID: \"9c4d42b3-28e8-476c-9639-b1c7010b3909\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:01 crc kubenswrapper[4774]: I1121 15:37:01.797351 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 15:37:02 crc kubenswrapper[4774]: I1121 15:37:02.106656 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a74c1583-05f2-4bb9-b1bc-51f1d0e304f3" path="/var/lib/kubelet/pods/a74c1583-05f2-4bb9-b1bc-51f1d0e304f3/volumes" Nov 21 15:37:02 crc kubenswrapper[4774]: I1121 15:37:02.108030 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d584d217-33a3-49fb-bc49-7a41563768fc" path="/var/lib/kubelet/pods/d584d217-33a3-49fb-bc49-7a41563768fc/volumes" Nov 21 15:37:02 crc kubenswrapper[4774]: I1121 15:37:02.223225 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:37:02 crc kubenswrapper[4774]: W1121 15:37:02.232456 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c4d42b3_28e8_476c_9639_b1c7010b3909.slice/crio-0c65a433f7affb6108aa892497b67dd80394ffd996101f8151c6cb0308768c35 WatchSource:0}: Error finding container 0c65a433f7affb6108aa892497b67dd80394ffd996101f8151c6cb0308768c35: Status 404 returned error can't find the container with id 0c65a433f7affb6108aa892497b67dd80394ffd996101f8151c6cb0308768c35 Nov 21 15:37:02 crc kubenswrapper[4774]: I1121 15:37:02.413922 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9c4d42b3-28e8-476c-9639-b1c7010b3909","Type":"ContainerStarted","Data":"0c65a433f7affb6108aa892497b67dd80394ffd996101f8151c6cb0308768c35"} Nov 21 15:37:03 crc kubenswrapper[4774]: I1121 15:37:03.084541 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 15:37:03 crc kubenswrapper[4774]: I1121 15:37:03.084925 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 15:37:03 crc kubenswrapper[4774]: I1121 15:37:03.424502 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9c4d42b3-28e8-476c-9639-b1c7010b3909","Type":"ContainerStarted","Data":"a207e96daede4ef1928638b1d136c112dea843de89d64d43a0bdd54c495e8bb8"} Nov 21 15:37:03 crc kubenswrapper[4774]: I1121 15:37:03.447349 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" 
podStartSLOduration=2.447324951 podStartE2EDuration="2.447324951s" podCreationTimestamp="2025-11-21 15:37:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:37:03.443050379 +0000 UTC m=+5614.095249638" watchObservedRunningTime="2025-11-21 15:37:03.447324951 +0000 UTC m=+5614.099524220" Nov 21 15:37:06 crc kubenswrapper[4774]: I1121 15:37:06.026080 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 21 15:37:06 crc kubenswrapper[4774]: I1121 15:37:06.798480 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 21 15:37:06 crc kubenswrapper[4774]: I1121 15:37:06.885055 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-nvdbj"] Nov 21 15:37:06 crc kubenswrapper[4774]: I1121 15:37:06.886762 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:06 crc kubenswrapper[4774]: I1121 15:37:06.893554 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Nov 21 15:37:06 crc kubenswrapper[4774]: I1121 15:37:06.893554 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Nov 21 15:37:06 crc kubenswrapper[4774]: I1121 15:37:06.894863 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-nvdbj"] Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.036802 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-nvdbj\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.036950 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-config-data\") pod \"nova-cell1-cell-mapping-nvdbj\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.036974 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wb7bw\" (UniqueName: \"kubernetes.io/projected/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-kube-api-access-wb7bw\") pod \"nova-cell1-cell-mapping-nvdbj\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.037032 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-scripts\") pod \"nova-cell1-cell-mapping-nvdbj\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.138578 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-scripts\") pod \"nova-cell1-cell-mapping-nvdbj\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " 
pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.138735 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-nvdbj\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.138783 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-config-data\") pod \"nova-cell1-cell-mapping-nvdbj\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.138807 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wb7bw\" (UniqueName: \"kubernetes.io/projected/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-kube-api-access-wb7bw\") pod \"nova-cell1-cell-mapping-nvdbj\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.144453 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-config-data\") pod \"nova-cell1-cell-mapping-nvdbj\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.154361 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-scripts\") pod \"nova-cell1-cell-mapping-nvdbj\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.157448 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-nvdbj\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.158554 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wb7bw\" (UniqueName: \"kubernetes.io/projected/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-kube-api-access-wb7bw\") pod \"nova-cell1-cell-mapping-nvdbj\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.212410 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:07 crc kubenswrapper[4774]: I1121 15:37:07.665277 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-nvdbj"] Nov 21 15:37:07 crc kubenswrapper[4774]: W1121 15:37:07.671545 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode65de5ed_1d20_4d17_a53d_7ed8ad06d0aa.slice/crio-efbf313bdaa704efebcb84d00ed83db2f51860e2c27a0b3e8fc1f4b437232144 WatchSource:0}: Error finding container efbf313bdaa704efebcb84d00ed83db2f51860e2c27a0b3e8fc1f4b437232144: Status 404 returned error can't find the container with id efbf313bdaa704efebcb84d00ed83db2f51860e2c27a0b3e8fc1f4b437232144 Nov 21 15:37:08 crc kubenswrapper[4774]: I1121 15:37:08.083968 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 21 15:37:08 crc kubenswrapper[4774]: I1121 15:37:08.084662 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 21 15:37:08 crc kubenswrapper[4774]: I1121 15:37:08.481462 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-nvdbj" event={"ID":"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa","Type":"ContainerStarted","Data":"56d55397fb790979133be00a350a9eb897a08b90a68e2febedcead8a0484339e"} Nov 21 15:37:08 crc kubenswrapper[4774]: I1121 15:37:08.481637 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-nvdbj" event={"ID":"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa","Type":"ContainerStarted","Data":"efbf313bdaa704efebcb84d00ed83db2f51860e2c27a0b3e8fc1f4b437232144"} Nov 21 15:37:08 crc kubenswrapper[4774]: I1121 15:37:08.507941 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-nvdbj" podStartSLOduration=2.507920241 podStartE2EDuration="2.507920241s" podCreationTimestamp="2025-11-21 15:37:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:37:08.495779665 +0000 UTC m=+5619.147978934" watchObservedRunningTime="2025-11-21 15:37:08.507920241 +0000 UTC m=+5619.160119500" Nov 21 15:37:08 crc kubenswrapper[4774]: I1121 15:37:08.723610 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 15:37:08 crc kubenswrapper[4774]: I1121 15:37:08.723901 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 15:37:09 crc kubenswrapper[4774]: I1121 15:37:09.167199 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.67:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 15:37:09 crc kubenswrapper[4774]: I1121 15:37:09.168269 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.67:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 15:37:09 crc kubenswrapper[4774]: I1121 15:37:09.806019 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" 
podUID="4496cb1d-522f-46e5-aedb-f1491b5e938c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.68:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 15:37:09 crc kubenswrapper[4774]: I1121 15:37:09.806072 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4496cb1d-522f-46e5-aedb-f1491b5e938c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.68:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 15:37:11 crc kubenswrapper[4774]: I1121 15:37:11.798579 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 21 15:37:11 crc kubenswrapper[4774]: I1121 15:37:11.846780 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 21 15:37:12 crc kubenswrapper[4774]: I1121 15:37:12.520986 4774 generic.go:334] "Generic (PLEG): container finished" podID="e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa" containerID="56d55397fb790979133be00a350a9eb897a08b90a68e2febedcead8a0484339e" exitCode=0 Nov 21 15:37:12 crc kubenswrapper[4774]: I1121 15:37:12.521174 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-nvdbj" event={"ID":"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa","Type":"ContainerDied","Data":"56d55397fb790979133be00a350a9eb897a08b90a68e2febedcead8a0484339e"} Nov 21 15:37:12 crc kubenswrapper[4774]: I1121 15:37:12.552132 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 21 15:37:13 crc kubenswrapper[4774]: I1121 15:37:13.849132 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:13 crc kubenswrapper[4774]: I1121 15:37:13.974912 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-combined-ca-bundle\") pod \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " Nov 21 15:37:13 crc kubenswrapper[4774]: I1121 15:37:13.975020 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-config-data\") pod \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " Nov 21 15:37:13 crc kubenswrapper[4774]: I1121 15:37:13.975173 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-scripts\") pod \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " Nov 21 15:37:13 crc kubenswrapper[4774]: I1121 15:37:13.975202 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wb7bw\" (UniqueName: \"kubernetes.io/projected/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-kube-api-access-wb7bw\") pod \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " Nov 21 15:37:13 crc kubenswrapper[4774]: I1121 15:37:13.981093 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-scripts" (OuterVolumeSpecName: "scripts") pod "e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa" (UID: 
"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:37:13 crc kubenswrapper[4774]: I1121 15:37:13.982143 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-kube-api-access-wb7bw" (OuterVolumeSpecName: "kube-api-access-wb7bw") pod "e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa" (UID: "e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa"). InnerVolumeSpecName "kube-api-access-wb7bw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:37:14 crc kubenswrapper[4774]: E1121 15:37:14.002345 4774 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-config-data podName:e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa nodeName:}" failed. No retries permitted until 2025-11-21 15:37:14.502317436 +0000 UTC m=+5625.154516695 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-config-data") pod "e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa" (UID: "e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa") : error deleting /var/lib/kubelet/pods/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa/volume-subpaths: remove /var/lib/kubelet/pods/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa/volume-subpaths: no such file or directory Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.005447 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa" (UID: "e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.077162 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.077194 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wb7bw\" (UniqueName: \"kubernetes.io/projected/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-kube-api-access-wb7bw\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.077205 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.540633 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-nvdbj" event={"ID":"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa","Type":"ContainerDied","Data":"efbf313bdaa704efebcb84d00ed83db2f51860e2c27a0b3e8fc1f4b437232144"} Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.540686 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efbf313bdaa704efebcb84d00ed83db2f51860e2c27a0b3e8fc1f4b437232144" Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.540987 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-nvdbj" Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.595951 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-config-data\") pod \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\" (UID: \"e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa\") " Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.603180 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-config-data" (OuterVolumeSpecName: "config-data") pod "e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa" (UID: "e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.698608 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.733539 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.733804 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4496cb1d-522f-46e5-aedb-f1491b5e938c" containerName="nova-api-log" containerID="cri-o://0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a" gracePeriod=30 Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.733988 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4496cb1d-522f-46e5-aedb-f1491b5e938c" containerName="nova-api-api" containerID="cri-o://c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2" gracePeriod=30 Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.752800 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.753004 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="9c4d42b3-28e8-476c-9639-b1c7010b3909" containerName="nova-scheduler-scheduler" containerID="cri-o://a207e96daede4ef1928638b1d136c112dea843de89d64d43a0bdd54c495e8bb8" gracePeriod=30 Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.769945 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.770260 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" containerName="nova-metadata-log" containerID="cri-o://5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b" gracePeriod=30 Nov 21 15:37:14 crc kubenswrapper[4774]: I1121 15:37:14.770520 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" containerName="nova-metadata-metadata" containerID="cri-o://460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943" gracePeriod=30 Nov 21 15:37:15 crc kubenswrapper[4774]: I1121 15:37:15.550169 4774 generic.go:334] "Generic (PLEG): container finished" podID="4496cb1d-522f-46e5-aedb-f1491b5e938c" containerID="0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a" 
exitCode=143 Nov 21 15:37:15 crc kubenswrapper[4774]: I1121 15:37:15.550254 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4496cb1d-522f-46e5-aedb-f1491b5e938c","Type":"ContainerDied","Data":"0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a"} Nov 21 15:37:15 crc kubenswrapper[4774]: I1121 15:37:15.551493 4774 generic.go:334] "Generic (PLEG): container finished" podID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" containerID="5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b" exitCode=143 Nov 21 15:37:15 crc kubenswrapper[4774]: I1121 15:37:15.551522 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d","Type":"ContainerDied","Data":"5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b"} Nov 21 15:37:16 crc kubenswrapper[4774]: E1121 15:37:16.802933 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a207e96daede4ef1928638b1d136c112dea843de89d64d43a0bdd54c495e8bb8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 15:37:16 crc kubenswrapper[4774]: E1121 15:37:16.805071 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a207e96daede4ef1928638b1d136c112dea843de89d64d43a0bdd54c495e8bb8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 15:37:16 crc kubenswrapper[4774]: E1121 15:37:16.807338 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a207e96daede4ef1928638b1d136c112dea843de89d64d43a0bdd54c495e8bb8" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 15:37:16 crc kubenswrapper[4774]: E1121 15:37:16.807378 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="9c4d42b3-28e8-476c-9639-b1c7010b3909" containerName="nova-scheduler-scheduler" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.400402 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.419875 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.468414 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4496cb1d-522f-46e5-aedb-f1491b5e938c-logs\") pod \"4496cb1d-522f-46e5-aedb-f1491b5e938c\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.468629 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4496cb1d-522f-46e5-aedb-f1491b5e938c-config-data\") pod \"4496cb1d-522f-46e5-aedb-f1491b5e938c\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.468798 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4496cb1d-522f-46e5-aedb-f1491b5e938c-combined-ca-bundle\") pod \"4496cb1d-522f-46e5-aedb-f1491b5e938c\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.468973 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4496cb1d-522f-46e5-aedb-f1491b5e938c-logs" (OuterVolumeSpecName: "logs") pod "4496cb1d-522f-46e5-aedb-f1491b5e938c" (UID: "4496cb1d-522f-46e5-aedb-f1491b5e938c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.469070 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8npv\" (UniqueName: \"kubernetes.io/projected/4496cb1d-522f-46e5-aedb-f1491b5e938c-kube-api-access-c8npv\") pod \"4496cb1d-522f-46e5-aedb-f1491b5e938c\" (UID: \"4496cb1d-522f-46e5-aedb-f1491b5e938c\") " Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.470000 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4496cb1d-522f-46e5-aedb-f1491b5e938c-logs\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.474218 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4496cb1d-522f-46e5-aedb-f1491b5e938c-kube-api-access-c8npv" (OuterVolumeSpecName: "kube-api-access-c8npv") pod "4496cb1d-522f-46e5-aedb-f1491b5e938c" (UID: "4496cb1d-522f-46e5-aedb-f1491b5e938c"). InnerVolumeSpecName "kube-api-access-c8npv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.496157 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4496cb1d-522f-46e5-aedb-f1491b5e938c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4496cb1d-522f-46e5-aedb-f1491b5e938c" (UID: "4496cb1d-522f-46e5-aedb-f1491b5e938c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.498150 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4496cb1d-522f-46e5-aedb-f1491b5e938c-config-data" (OuterVolumeSpecName: "config-data") pod "4496cb1d-522f-46e5-aedb-f1491b5e938c" (UID: "4496cb1d-522f-46e5-aedb-f1491b5e938c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.571066 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-combined-ca-bundle\") pod \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.571344 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-logs\") pod \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.571436 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2c7dg\" (UniqueName: \"kubernetes.io/projected/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-kube-api-access-2c7dg\") pod \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.571526 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-config-data\") pod \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\" (UID: \"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d\") " Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.572008 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8npv\" (UniqueName: \"kubernetes.io/projected/4496cb1d-522f-46e5-aedb-f1491b5e938c-kube-api-access-c8npv\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.572028 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4496cb1d-522f-46e5-aedb-f1491b5e938c-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.572040 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4496cb1d-522f-46e5-aedb-f1491b5e938c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.572239 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-logs" (OuterVolumeSpecName: "logs") pod "dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" (UID: "dcc3fed4-8bcf-49bf-8d78-68f038d20e9d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.577781 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-kube-api-access-2c7dg" (OuterVolumeSpecName: "kube-api-access-2c7dg") pod "dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" (UID: "dcc3fed4-8bcf-49bf-8d78-68f038d20e9d"). InnerVolumeSpecName "kube-api-access-2c7dg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.599219 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-config-data" (OuterVolumeSpecName: "config-data") pod "dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" (UID: "dcc3fed4-8bcf-49bf-8d78-68f038d20e9d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.604736 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" (UID: "dcc3fed4-8bcf-49bf-8d78-68f038d20e9d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.610450 4774 generic.go:334] "Generic (PLEG): container finished" podID="4496cb1d-522f-46e5-aedb-f1491b5e938c" containerID="c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2" exitCode=0 Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.610490 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.610505 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4496cb1d-522f-46e5-aedb-f1491b5e938c","Type":"ContainerDied","Data":"c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2"} Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.610685 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4496cb1d-522f-46e5-aedb-f1491b5e938c","Type":"ContainerDied","Data":"ab15098edebb27d88d75eede97cea4369378c90c3e52f1539ceb1315f216ecd6"} Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.610707 4774 scope.go:117] "RemoveContainer" containerID="c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.618180 4774 generic.go:334] "Generic (PLEG): container finished" podID="9c4d42b3-28e8-476c-9639-b1c7010b3909" containerID="a207e96daede4ef1928638b1d136c112dea843de89d64d43a0bdd54c495e8bb8" exitCode=0 Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.618283 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9c4d42b3-28e8-476c-9639-b1c7010b3909","Type":"ContainerDied","Data":"a207e96daede4ef1928638b1d136c112dea843de89d64d43a0bdd54c495e8bb8"} Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.621584 4774 generic.go:334] "Generic (PLEG): container finished" podID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" containerID="460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943" exitCode=0 Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.621619 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d","Type":"ContainerDied","Data":"460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943"} Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.621639 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"dcc3fed4-8bcf-49bf-8d78-68f038d20e9d","Type":"ContainerDied","Data":"06a260d1fc3f38dece8f23d7987624e148f58c59f2a182d0072fcfafe253ab67"} Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.621657 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.652797 4774 scope.go:117] "RemoveContainer" containerID="0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.662495 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.673637 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-logs\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.673870 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2c7dg\" (UniqueName: \"kubernetes.io/projected/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-kube-api-access-2c7dg\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.673997 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.674056 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.697334 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.705554 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 21 15:37:18 crc kubenswrapper[4774]: E1121 15:37:18.706063 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" containerName="nova-metadata-log" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.706089 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" containerName="nova-metadata-log" Nov 21 15:37:18 crc kubenswrapper[4774]: E1121 15:37:18.706116 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4496cb1d-522f-46e5-aedb-f1491b5e938c" containerName="nova-api-api" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.706124 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4496cb1d-522f-46e5-aedb-f1491b5e938c" containerName="nova-api-api" Nov 21 15:37:18 crc kubenswrapper[4774]: E1121 15:37:18.706138 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" containerName="nova-metadata-metadata" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.706146 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" containerName="nova-metadata-metadata" Nov 21 15:37:18 crc kubenswrapper[4774]: E1121 15:37:18.706165 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa" containerName="nova-manage" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.706173 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa" containerName="nova-manage" Nov 21 15:37:18 crc kubenswrapper[4774]: E1121 15:37:18.706197 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4496cb1d-522f-46e5-aedb-f1491b5e938c" containerName="nova-api-log" Nov 21 15:37:18 crc 
kubenswrapper[4774]: I1121 15:37:18.706205 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4496cb1d-522f-46e5-aedb-f1491b5e938c" containerName="nova-api-log" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.706654 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa" containerName="nova-manage" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.706690 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" containerName="nova-metadata-metadata" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.706702 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4496cb1d-522f-46e5-aedb-f1491b5e938c" containerName="nova-api-log" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.706715 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" containerName="nova-metadata-log" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.706729 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4496cb1d-522f-46e5-aedb-f1491b5e938c" containerName="nova-api-api" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.710576 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.721077 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.727148 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.742733 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.747279 4774 scope.go:117] "RemoveContainer" containerID="c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2" Nov 21 15:37:18 crc kubenswrapper[4774]: E1121 15:37:18.764490 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2\": container with ID starting with c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2 not found: ID does not exist" containerID="c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.764549 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2"} err="failed to get container status \"c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2\": rpc error: code = NotFound desc = could not find container \"c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2\": container with ID starting with c4313cf1a7da65adc6ff734acf348ea8d70e1c582d80a1136a3009ee60deeda2 not found: ID does not exist" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.764584 4774 scope.go:117] "RemoveContainer" containerID="0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.767418 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 15:37:18 crc kubenswrapper[4774]: E1121 15:37:18.771235 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not 
find container \"0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a\": container with ID starting with 0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a not found: ID does not exist" containerID="0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.771269 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a"} err="failed to get container status \"0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a\": rpc error: code = NotFound desc = could not find container \"0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a\": container with ID starting with 0ec95f2dc5c23d7d095d5629677b18688f20d02d6929b9d42d2cba7355effe9a not found: ID does not exist" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.771295 4774 scope.go:117] "RemoveContainer" containerID="460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.775264 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cf0f33b-88f6-477b-a268-6e1001603327-config-data\") pod \"nova-api-0\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") " pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.775448 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cf0f33b-88f6-477b-a268-6e1001603327-logs\") pod \"nova-api-0\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") " pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.775574 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cf0f33b-88f6-477b-a268-6e1001603327-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") " pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.775720 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pszxv\" (UniqueName: \"kubernetes.io/projected/7cf0f33b-88f6-477b-a268-6e1001603327-kube-api-access-pszxv\") pod \"nova-api-0\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") " pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.782868 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.785008 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.790619 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.799148 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.812737 4774 scope.go:117] "RemoveContainer" containerID="5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.833338 4774 scope.go:117] "RemoveContainer" containerID="460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943" Nov 21 15:37:18 crc kubenswrapper[4774]: E1121 15:37:18.834031 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943\": container with ID starting with 460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943 not found: ID does not exist" containerID="460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.834066 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943"} err="failed to get container status \"460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943\": rpc error: code = NotFound desc = could not find container \"460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943\": container with ID starting with 460c403429b2670e31343549a059b88ee36bd687875b4a68be72390549762943 not found: ID does not exist" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.834088 4774 scope.go:117] "RemoveContainer" containerID="5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b" Nov 21 15:37:18 crc kubenswrapper[4774]: E1121 15:37:18.834479 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b\": container with ID starting with 5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b not found: ID does not exist" containerID="5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.834662 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b"} err="failed to get container status \"5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b\": rpc error: code = NotFound desc = could not find container \"5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b\": container with ID starting with 5f323aea15202b44df4aecc227381a3b372f612fe810d46b704fb58d2340d51b not found: ID does not exist" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.869573 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.877648 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cf0f33b-88f6-477b-a268-6e1001603327-logs\") pod \"nova-api-0\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") " pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.877701 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cf0f33b-88f6-477b-a268-6e1001603327-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") " pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.877745 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4c11e5c-84cc-44dd-a229-dc16f75f9183-config-data\") pod \"nova-metadata-0\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") " pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.877814 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pszxv\" (UniqueName: \"kubernetes.io/projected/7cf0f33b-88f6-477b-a268-6e1001603327-kube-api-access-pszxv\") pod \"nova-api-0\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") " pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.877889 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4c11e5c-84cc-44dd-a229-dc16f75f9183-logs\") pod \"nova-metadata-0\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") " pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.877938 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dgnl\" (UniqueName: \"kubernetes.io/projected/b4c11e5c-84cc-44dd-a229-dc16f75f9183-kube-api-access-2dgnl\") pod \"nova-metadata-0\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") " pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.878032 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4c11e5c-84cc-44dd-a229-dc16f75f9183-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") " pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.878082 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cf0f33b-88f6-477b-a268-6e1001603327-config-data\") pod \"nova-api-0\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") " pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.878211 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cf0f33b-88f6-477b-a268-6e1001603327-logs\") pod \"nova-api-0\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") " pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.883083 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7cf0f33b-88f6-477b-a268-6e1001603327-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") " pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.883424 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cf0f33b-88f6-477b-a268-6e1001603327-config-data\") pod \"nova-api-0\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") " pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.897785 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pszxv\" (UniqueName: \"kubernetes.io/projected/7cf0f33b-88f6-477b-a268-6e1001603327-kube-api-access-pszxv\") pod \"nova-api-0\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") " pod="openstack/nova-api-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.979461 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwvz5\" (UniqueName: \"kubernetes.io/projected/9c4d42b3-28e8-476c-9639-b1c7010b3909-kube-api-access-bwvz5\") pod \"9c4d42b3-28e8-476c-9639-b1c7010b3909\" (UID: \"9c4d42b3-28e8-476c-9639-b1c7010b3909\") " Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.979791 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c4d42b3-28e8-476c-9639-b1c7010b3909-config-data\") pod \"9c4d42b3-28e8-476c-9639-b1c7010b3909\" (UID: \"9c4d42b3-28e8-476c-9639-b1c7010b3909\") " Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.979944 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c4d42b3-28e8-476c-9639-b1c7010b3909-combined-ca-bundle\") pod \"9c4d42b3-28e8-476c-9639-b1c7010b3909\" (UID: \"9c4d42b3-28e8-476c-9639-b1c7010b3909\") " Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.980274 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4c11e5c-84cc-44dd-a229-dc16f75f9183-config-data\") pod \"nova-metadata-0\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") " pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.980359 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4c11e5c-84cc-44dd-a229-dc16f75f9183-logs\") pod \"nova-metadata-0\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") " pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.980416 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dgnl\" (UniqueName: \"kubernetes.io/projected/b4c11e5c-84cc-44dd-a229-dc16f75f9183-kube-api-access-2dgnl\") pod \"nova-metadata-0\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") " pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.980465 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4c11e5c-84cc-44dd-a229-dc16f75f9183-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") " pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.981043 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/b4c11e5c-84cc-44dd-a229-dc16f75f9183-logs\") pod \"nova-metadata-0\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") " pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.983182 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c4d42b3-28e8-476c-9639-b1c7010b3909-kube-api-access-bwvz5" (OuterVolumeSpecName: "kube-api-access-bwvz5") pod "9c4d42b3-28e8-476c-9639-b1c7010b3909" (UID: "9c4d42b3-28e8-476c-9639-b1c7010b3909"). InnerVolumeSpecName "kube-api-access-bwvz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.984261 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4c11e5c-84cc-44dd-a229-dc16f75f9183-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") " pod="openstack/nova-metadata-0" Nov 21 15:37:18 crc kubenswrapper[4774]: I1121 15:37:18.984374 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4c11e5c-84cc-44dd-a229-dc16f75f9183-config-data\") pod \"nova-metadata-0\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") " pod="openstack/nova-metadata-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:18.999845 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dgnl\" (UniqueName: \"kubernetes.io/projected/b4c11e5c-84cc-44dd-a229-dc16f75f9183-kube-api-access-2dgnl\") pod \"nova-metadata-0\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") " pod="openstack/nova-metadata-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.007281 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c4d42b3-28e8-476c-9639-b1c7010b3909-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c4d42b3-28e8-476c-9639-b1c7010b3909" (UID: "9c4d42b3-28e8-476c-9639-b1c7010b3909"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.008245 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c4d42b3-28e8-476c-9639-b1c7010b3909-config-data" (OuterVolumeSpecName: "config-data") pod "9c4d42b3-28e8-476c-9639-b1c7010b3909" (UID: "9c4d42b3-28e8-476c-9639-b1c7010b3909"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.046277 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.085303 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwvz5\" (UniqueName: \"kubernetes.io/projected/9c4d42b3-28e8-476c-9639-b1c7010b3909-kube-api-access-bwvz5\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.085344 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c4d42b3-28e8-476c-9639-b1c7010b3909-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.085357 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c4d42b3-28e8-476c-9639-b1c7010b3909-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.110875 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.524569 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 15:37:19 crc kubenswrapper[4774]: W1121 15:37:19.524805 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7cf0f33b_88f6_477b_a268_6e1001603327.slice/crio-a2aaaa7ce57c97c27356c9322708d7b7c952eed67cb3f4c0b83a592db6963cbd WatchSource:0}: Error finding container a2aaaa7ce57c97c27356c9322708d7b7c952eed67cb3f4c0b83a592db6963cbd: Status 404 returned error can't find the container with id a2aaaa7ce57c97c27356c9322708d7b7c952eed67cb3f4c0b83a592db6963cbd Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.608393 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.634346 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.636841 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"9c4d42b3-28e8-476c-9639-b1c7010b3909","Type":"ContainerDied","Data":"0c65a433f7affb6108aa892497b67dd80394ffd996101f8151c6cb0308768c35"} Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.636954 4774 scope.go:117] "RemoveContainer" containerID="a207e96daede4ef1928638b1d136c112dea843de89d64d43a0bdd54c495e8bb8" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.641639 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4c11e5c-84cc-44dd-a229-dc16f75f9183","Type":"ContainerStarted","Data":"7befa88788d91d656f473134ccf36ac181e628957768f974d7a777e70e5fc262"} Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.647934 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7cf0f33b-88f6-477b-a268-6e1001603327","Type":"ContainerStarted","Data":"a2aaaa7ce57c97c27356c9322708d7b7c952eed67cb3f4c0b83a592db6963cbd"} Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.692410 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.715770 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.723196 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:37:19 crc kubenswrapper[4774]: E1121 15:37:19.723665 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c4d42b3-28e8-476c-9639-b1c7010b3909" containerName="nova-scheduler-scheduler" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.723684 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c4d42b3-28e8-476c-9639-b1c7010b3909" containerName="nova-scheduler-scheduler" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.723977 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c4d42b3-28e8-476c-9639-b1c7010b3909" containerName="nova-scheduler-scheduler" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.724604 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.729025 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.732368 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.797797 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42e7d998-cb9c-42b2-8727-82d1166f7291-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"42e7d998-cb9c-42b2-8727-82d1166f7291\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.798070 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42e7d998-cb9c-42b2-8727-82d1166f7291-config-data\") pod \"nova-scheduler-0\" (UID: \"42e7d998-cb9c-42b2-8727-82d1166f7291\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.798188 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwj7l\" (UniqueName: \"kubernetes.io/projected/42e7d998-cb9c-42b2-8727-82d1166f7291-kube-api-access-gwj7l\") pod \"nova-scheduler-0\" (UID: \"42e7d998-cb9c-42b2-8727-82d1166f7291\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.899679 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwj7l\" (UniqueName: \"kubernetes.io/projected/42e7d998-cb9c-42b2-8727-82d1166f7291-kube-api-access-gwj7l\") pod \"nova-scheduler-0\" (UID: \"42e7d998-cb9c-42b2-8727-82d1166f7291\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.899758 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42e7d998-cb9c-42b2-8727-82d1166f7291-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"42e7d998-cb9c-42b2-8727-82d1166f7291\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.899868 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42e7d998-cb9c-42b2-8727-82d1166f7291-config-data\") pod \"nova-scheduler-0\" (UID: \"42e7d998-cb9c-42b2-8727-82d1166f7291\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.906007 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42e7d998-cb9c-42b2-8727-82d1166f7291-config-data\") pod \"nova-scheduler-0\" (UID: \"42e7d998-cb9c-42b2-8727-82d1166f7291\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.906273 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42e7d998-cb9c-42b2-8727-82d1166f7291-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"42e7d998-cb9c-42b2-8727-82d1166f7291\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:19 crc kubenswrapper[4774]: I1121 15:37:19.916398 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwj7l\" (UniqueName: 
\"kubernetes.io/projected/42e7d998-cb9c-42b2-8727-82d1166f7291-kube-api-access-gwj7l\") pod \"nova-scheduler-0\" (UID: \"42e7d998-cb9c-42b2-8727-82d1166f7291\") " pod="openstack/nova-scheduler-0" Nov 21 15:37:20 crc kubenswrapper[4774]: I1121 15:37:20.057538 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 15:37:20 crc kubenswrapper[4774]: I1121 15:37:20.105264 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4496cb1d-522f-46e5-aedb-f1491b5e938c" path="/var/lib/kubelet/pods/4496cb1d-522f-46e5-aedb-f1491b5e938c/volumes" Nov 21 15:37:20 crc kubenswrapper[4774]: I1121 15:37:20.105841 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c4d42b3-28e8-476c-9639-b1c7010b3909" path="/var/lib/kubelet/pods/9c4d42b3-28e8-476c-9639-b1c7010b3909/volumes" Nov 21 15:37:20 crc kubenswrapper[4774]: I1121 15:37:20.106341 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcc3fed4-8bcf-49bf-8d78-68f038d20e9d" path="/var/lib/kubelet/pods/dcc3fed4-8bcf-49bf-8d78-68f038d20e9d/volumes" Nov 21 15:37:20 crc kubenswrapper[4774]: I1121 15:37:20.536773 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:37:20 crc kubenswrapper[4774]: I1121 15:37:20.660191 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4c11e5c-84cc-44dd-a229-dc16f75f9183","Type":"ContainerStarted","Data":"30cfcf12060cca9451eeaab0a6ca91843ea566132672af38529e0ceb71b6585c"} Nov 21 15:37:20 crc kubenswrapper[4774]: I1121 15:37:20.660247 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4c11e5c-84cc-44dd-a229-dc16f75f9183","Type":"ContainerStarted","Data":"54e23ad3b0355e055e1808222061ed230d33af67576cbf96bb72cc97b47c3d40"} Nov 21 15:37:20 crc kubenswrapper[4774]: I1121 15:37:20.661945 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"42e7d998-cb9c-42b2-8727-82d1166f7291","Type":"ContainerStarted","Data":"d217f37bba305d19c7b6e05abd4901effb9f689ac6ef9569afb2adc3991447f1"} Nov 21 15:37:20 crc kubenswrapper[4774]: I1121 15:37:20.663717 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7cf0f33b-88f6-477b-a268-6e1001603327","Type":"ContainerStarted","Data":"6caa185b9bdcaa5b7ffe62e2f54d287b7feeac7c36eee315137b6b4c3eacaa37"} Nov 21 15:37:20 crc kubenswrapper[4774]: I1121 15:37:20.663852 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7cf0f33b-88f6-477b-a268-6e1001603327","Type":"ContainerStarted","Data":"4212129adf2e55f6daeae002e98addf31fc603242658a950c54d2ffae7e04a88"} Nov 21 15:37:20 crc kubenswrapper[4774]: I1121 15:37:20.678601 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.678584731 podStartE2EDuration="2.678584731s" podCreationTimestamp="2025-11-21 15:37:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:37:20.678075577 +0000 UTC m=+5631.330274836" watchObservedRunningTime="2025-11-21 15:37:20.678584731 +0000 UTC m=+5631.330783990" Nov 21 15:37:20 crc kubenswrapper[4774]: I1121 15:37:20.708472 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.708449933 
podStartE2EDuration="2.708449933s" podCreationTimestamp="2025-11-21 15:37:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:37:20.702554435 +0000 UTC m=+5631.354753724" watchObservedRunningTime="2025-11-21 15:37:20.708449933 +0000 UTC m=+5631.360649192" Nov 21 15:37:21 crc kubenswrapper[4774]: I1121 15:37:21.673222 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"42e7d998-cb9c-42b2-8727-82d1166f7291","Type":"ContainerStarted","Data":"7b0f53413236437273b291bdcb17f3c75a24d41889d6b532e6a697e2e60d182c"} Nov 21 15:37:21 crc kubenswrapper[4774]: I1121 15:37:21.715171 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.71514853 podStartE2EDuration="2.71514853s" podCreationTimestamp="2025-11-21 15:37:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:37:21.713235726 +0000 UTC m=+5632.365434985" watchObservedRunningTime="2025-11-21 15:37:21.71514853 +0000 UTC m=+5632.367347809" Nov 21 15:37:24 crc kubenswrapper[4774]: I1121 15:37:24.111474 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 15:37:24 crc kubenswrapper[4774]: I1121 15:37:24.112290 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 15:37:25 crc kubenswrapper[4774]: I1121 15:37:25.058489 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 21 15:37:29 crc kubenswrapper[4774]: I1121 15:37:29.048370 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 15:37:29 crc kubenswrapper[4774]: I1121 15:37:29.048806 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 15:37:29 crc kubenswrapper[4774]: I1121 15:37:29.111256 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 21 15:37:29 crc kubenswrapper[4774]: I1121 15:37:29.111301 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 21 15:37:29 crc kubenswrapper[4774]: I1121 15:37:29.601049 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:37:29 crc kubenswrapper[4774]: I1121 15:37:29.601118 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:37:29 crc kubenswrapper[4774]: I1121 15:37:29.601167 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 15:37:29 crc kubenswrapper[4774]: I1121 15:37:29.608064 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"4849e8220ea36f19f58def5dba0778aa648235f180867dd4feddda2e2ae19099"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 15:37:29 crc kubenswrapper[4774]: I1121 15:37:29.608135 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://4849e8220ea36f19f58def5dba0778aa648235f180867dd4feddda2e2ae19099" gracePeriod=600 Nov 21 15:37:29 crc kubenswrapper[4774]: I1121 15:37:29.758862 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="4849e8220ea36f19f58def5dba0778aa648235f180867dd4feddda2e2ae19099" exitCode=0 Nov 21 15:37:29 crc kubenswrapper[4774]: I1121 15:37:29.758930 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"4849e8220ea36f19f58def5dba0778aa648235f180867dd4feddda2e2ae19099"} Nov 21 15:37:29 crc kubenswrapper[4774]: I1121 15:37:29.759275 4774 scope.go:117] "RemoveContainer" containerID="8a5100460e8d9a87013bb937aacdd4b3e15d6ae75567c6ea2d1d5b1e543659dc" Nov 21 15:37:30 crc kubenswrapper[4774]: I1121 15:37:30.058516 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 21 15:37:30 crc kubenswrapper[4774]: I1121 15:37:30.088006 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 21 15:37:30 crc kubenswrapper[4774]: I1121 15:37:30.132037 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7cf0f33b-88f6-477b-a268-6e1001603327" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.71:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 15:37:30 crc kubenswrapper[4774]: I1121 15:37:30.132185 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="7cf0f33b-88f6-477b-a268-6e1001603327" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.71:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 15:37:30 crc kubenswrapper[4774]: I1121 15:37:30.214065 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.72:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 15:37:30 crc kubenswrapper[4774]: I1121 15:37:30.214068 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.72:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 15:37:30 crc kubenswrapper[4774]: I1121 15:37:30.776538 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" 
event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70"} Nov 21 15:37:30 crc kubenswrapper[4774]: I1121 15:37:30.808308 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.052518 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.053094 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.053598 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.053625 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.057924 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.058746 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.114983 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.117093 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.117930 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.259960 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c9f988f55-pw27d"] Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.269360 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.275380 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c9f988f55-pw27d"] Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.312033 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.312131 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-config\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.312180 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-src47\" (UniqueName: \"kubernetes.io/projected/859d895c-32e3-412e-89d7-ee321c45a7fe-kube-api-access-src47\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.312246 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.312309 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-dns-svc\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.413855 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.413967 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-dns-svc\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.414028 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.414090 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-config\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.414122 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-src47\" (UniqueName: \"kubernetes.io/projected/859d895c-32e3-412e-89d7-ee321c45a7fe-kube-api-access-src47\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.414926 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.415027 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.415298 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-config\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.415412 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-dns-svc\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.436024 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-src47\" (UniqueName: \"kubernetes.io/projected/859d895c-32e3-412e-89d7-ee321c45a7fe-kube-api-access-src47\") pod \"dnsmasq-dns-7c9f988f55-pw27d\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.593984 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:39 crc kubenswrapper[4774]: I1121 15:37:39.874697 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 21 15:37:40 crc kubenswrapper[4774]: I1121 15:37:40.110298 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c9f988f55-pw27d"] Nov 21 15:37:40 crc kubenswrapper[4774]: I1121 15:37:40.877931 4774 generic.go:334] "Generic (PLEG): container finished" podID="859d895c-32e3-412e-89d7-ee321c45a7fe" containerID="dcb501b455e91a226563f55d1fd6d9404ef8342107aaced2f21f1dd7d08af0b8" exitCode=0 Nov 21 15:37:40 crc kubenswrapper[4774]: I1121 15:37:40.879618 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" event={"ID":"859d895c-32e3-412e-89d7-ee321c45a7fe","Type":"ContainerDied","Data":"dcb501b455e91a226563f55d1fd6d9404ef8342107aaced2f21f1dd7d08af0b8"} Nov 21 15:37:40 crc kubenswrapper[4774]: I1121 15:37:40.879651 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" event={"ID":"859d895c-32e3-412e-89d7-ee321c45a7fe","Type":"ContainerStarted","Data":"36b8eae4a84fc612cba5fad682ee918b405babf4fda648c2361738a38283a664"} Nov 21 15:37:41 crc kubenswrapper[4774]: I1121 15:37:41.886993 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" event={"ID":"859d895c-32e3-412e-89d7-ee321c45a7fe","Type":"ContainerStarted","Data":"bcc91eca8a5e0bba0e5e56e8d5e4b1a8e2ed67fdec89e6d3fcce007474476c2d"} Nov 21 15:37:41 crc kubenswrapper[4774]: I1121 15:37:41.912996 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" podStartSLOduration=2.912977846 podStartE2EDuration="2.912977846s" podCreationTimestamp="2025-11-21 15:37:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:37:41.905546105 +0000 UTC m=+5652.557745374" watchObservedRunningTime="2025-11-21 15:37:41.912977846 +0000 UTC m=+5652.565177105" Nov 21 15:37:42 crc kubenswrapper[4774]: I1121 15:37:42.900485 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:49 crc kubenswrapper[4774]: I1121 15:37:49.595999 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:37:49 crc kubenswrapper[4774]: I1121 15:37:49.658576 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6f5885c9-r95pq"] Nov 21 15:37:49 crc kubenswrapper[4774]: I1121 15:37:49.658873 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" podUID="dc14b10f-4cd2-4747-9137-39fb3f97ead4" containerName="dnsmasq-dns" containerID="cri-o://66123e6c311fdae3c77a271d72af3dab55c4fdc47c70c70737773017ff1dfcf7" gracePeriod=10 Nov 21 15:37:49 crc kubenswrapper[4774]: I1121 15:37:49.998741 4774 generic.go:334] "Generic (PLEG): container finished" podID="dc14b10f-4cd2-4747-9137-39fb3f97ead4" containerID="66123e6c311fdae3c77a271d72af3dab55c4fdc47c70c70737773017ff1dfcf7" exitCode=0 Nov 21 15:37:49 crc kubenswrapper[4774]: I1121 15:37:49.998864 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" 
event={"ID":"dc14b10f-4cd2-4747-9137-39fb3f97ead4","Type":"ContainerDied","Data":"66123e6c311fdae3c77a271d72af3dab55c4fdc47c70c70737773017ff1dfcf7"} Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.163915 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.212793 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-ovsdbserver-nb\") pod \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.212938 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-dns-svc\") pod \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.213104 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkqwf\" (UniqueName: \"kubernetes.io/projected/dc14b10f-4cd2-4747-9137-39fb3f97ead4-kube-api-access-pkqwf\") pod \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.213149 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-ovsdbserver-sb\") pod \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.213183 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-config\") pod \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\" (UID: \"dc14b10f-4cd2-4747-9137-39fb3f97ead4\") " Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.218297 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc14b10f-4cd2-4747-9137-39fb3f97ead4-kube-api-access-pkqwf" (OuterVolumeSpecName: "kube-api-access-pkqwf") pod "dc14b10f-4cd2-4747-9137-39fb3f97ead4" (UID: "dc14b10f-4cd2-4747-9137-39fb3f97ead4"). InnerVolumeSpecName "kube-api-access-pkqwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.268886 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dc14b10f-4cd2-4747-9137-39fb3f97ead4" (UID: "dc14b10f-4cd2-4747-9137-39fb3f97ead4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.270524 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dc14b10f-4cd2-4747-9137-39fb3f97ead4" (UID: "dc14b10f-4cd2-4747-9137-39fb3f97ead4"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.276759 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-config" (OuterVolumeSpecName: "config") pod "dc14b10f-4cd2-4747-9137-39fb3f97ead4" (UID: "dc14b10f-4cd2-4747-9137-39fb3f97ead4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.279388 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dc14b10f-4cd2-4747-9137-39fb3f97ead4" (UID: "dc14b10f-4cd2-4747-9137-39fb3f97ead4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.316253 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkqwf\" (UniqueName: \"kubernetes.io/projected/dc14b10f-4cd2-4747-9137-39fb3f97ead4-kube-api-access-pkqwf\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.316290 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.316301 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.316309 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:50 crc kubenswrapper[4774]: I1121 15:37:50.316317 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dc14b10f-4cd2-4747-9137-39fb3f97ead4-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.012102 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" event={"ID":"dc14b10f-4cd2-4747-9137-39fb3f97ead4","Type":"ContainerDied","Data":"6c5fad3c154418c462ea98b1b7b15fd177f09c980cad2a3786c47d060f95f9ce"} Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.012414 4774 scope.go:117] "RemoveContainer" containerID="66123e6c311fdae3c77a271d72af3dab55c4fdc47c70c70737773017ff1dfcf7" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.012573 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c6f5885c9-r95pq" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.060249 4774 scope.go:117] "RemoveContainer" containerID="c216711707970b6a58618e62fe7d83d2aac29efe8f1f83a791349a4921dfa20c" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.062785 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c6f5885c9-r95pq"] Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.071083 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c6f5885c9-r95pq"] Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.669325 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-sm9vk"] Nov 21 15:37:51 crc kubenswrapper[4774]: E1121 15:37:51.670171 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc14b10f-4cd2-4747-9137-39fb3f97ead4" containerName="init" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.670194 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc14b10f-4cd2-4747-9137-39fb3f97ead4" containerName="init" Nov 21 15:37:51 crc kubenswrapper[4774]: E1121 15:37:51.670214 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc14b10f-4cd2-4747-9137-39fb3f97ead4" containerName="dnsmasq-dns" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.670223 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc14b10f-4cd2-4747-9137-39fb3f97ead4" containerName="dnsmasq-dns" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.670446 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc14b10f-4cd2-4747-9137-39fb3f97ead4" containerName="dnsmasq-dns" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.671255 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-sm9vk" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.681476 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-sm9vk"] Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.741799 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54wjk\" (UniqueName: \"kubernetes.io/projected/b10741c8-7c49-45d6-ae01-cb50e0407267-kube-api-access-54wjk\") pod \"cinder-db-create-sm9vk\" (UID: \"b10741c8-7c49-45d6-ae01-cb50e0407267\") " pod="openstack/cinder-db-create-sm9vk" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.741966 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b10741c8-7c49-45d6-ae01-cb50e0407267-operator-scripts\") pod \"cinder-db-create-sm9vk\" (UID: \"b10741c8-7c49-45d6-ae01-cb50e0407267\") " pod="openstack/cinder-db-create-sm9vk" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.765308 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-a3cc-account-create-8698w"] Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.766444 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-a3cc-account-create-8698w" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.768594 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.780999 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-a3cc-account-create-8698w"] Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.844492 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54wjk\" (UniqueName: \"kubernetes.io/projected/b10741c8-7c49-45d6-ae01-cb50e0407267-kube-api-access-54wjk\") pod \"cinder-db-create-sm9vk\" (UID: \"b10741c8-7c49-45d6-ae01-cb50e0407267\") " pod="openstack/cinder-db-create-sm9vk" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.844602 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d06f2978-f05e-410a-82b2-ef92434f5b93-operator-scripts\") pod \"cinder-a3cc-account-create-8698w\" (UID: \"d06f2978-f05e-410a-82b2-ef92434f5b93\") " pod="openstack/cinder-a3cc-account-create-8698w" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.844637 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbr5p\" (UniqueName: \"kubernetes.io/projected/d06f2978-f05e-410a-82b2-ef92434f5b93-kube-api-access-lbr5p\") pod \"cinder-a3cc-account-create-8698w\" (UID: \"d06f2978-f05e-410a-82b2-ef92434f5b93\") " pod="openstack/cinder-a3cc-account-create-8698w" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.844691 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b10741c8-7c49-45d6-ae01-cb50e0407267-operator-scripts\") pod \"cinder-db-create-sm9vk\" (UID: \"b10741c8-7c49-45d6-ae01-cb50e0407267\") " pod="openstack/cinder-db-create-sm9vk" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.845735 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b10741c8-7c49-45d6-ae01-cb50e0407267-operator-scripts\") pod \"cinder-db-create-sm9vk\" (UID: \"b10741c8-7c49-45d6-ae01-cb50e0407267\") " pod="openstack/cinder-db-create-sm9vk" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.866585 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54wjk\" (UniqueName: \"kubernetes.io/projected/b10741c8-7c49-45d6-ae01-cb50e0407267-kube-api-access-54wjk\") pod \"cinder-db-create-sm9vk\" (UID: \"b10741c8-7c49-45d6-ae01-cb50e0407267\") " pod="openstack/cinder-db-create-sm9vk" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.945518 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d06f2978-f05e-410a-82b2-ef92434f5b93-operator-scripts\") pod \"cinder-a3cc-account-create-8698w\" (UID: \"d06f2978-f05e-410a-82b2-ef92434f5b93\") " pod="openstack/cinder-a3cc-account-create-8698w" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.945563 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbr5p\" (UniqueName: \"kubernetes.io/projected/d06f2978-f05e-410a-82b2-ef92434f5b93-kube-api-access-lbr5p\") pod \"cinder-a3cc-account-create-8698w\" (UID: \"d06f2978-f05e-410a-82b2-ef92434f5b93\") " 
pod="openstack/cinder-a3cc-account-create-8698w" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.946288 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d06f2978-f05e-410a-82b2-ef92434f5b93-operator-scripts\") pod \"cinder-a3cc-account-create-8698w\" (UID: \"d06f2978-f05e-410a-82b2-ef92434f5b93\") " pod="openstack/cinder-a3cc-account-create-8698w" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.961771 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbr5p\" (UniqueName: \"kubernetes.io/projected/d06f2978-f05e-410a-82b2-ef92434f5b93-kube-api-access-lbr5p\") pod \"cinder-a3cc-account-create-8698w\" (UID: \"d06f2978-f05e-410a-82b2-ef92434f5b93\") " pod="openstack/cinder-a3cc-account-create-8698w" Nov 21 15:37:51 crc kubenswrapper[4774]: I1121 15:37:51.998302 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-sm9vk" Nov 21 15:37:52 crc kubenswrapper[4774]: I1121 15:37:52.089350 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a3cc-account-create-8698w" Nov 21 15:37:52 crc kubenswrapper[4774]: I1121 15:37:52.107997 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc14b10f-4cd2-4747-9137-39fb3f97ead4" path="/var/lib/kubelet/pods/dc14b10f-4cd2-4747-9137-39fb3f97ead4/volumes" Nov 21 15:37:52 crc kubenswrapper[4774]: I1121 15:37:52.458665 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-sm9vk"] Nov 21 15:37:52 crc kubenswrapper[4774]: I1121 15:37:52.553487 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-a3cc-account-create-8698w"] Nov 21 15:37:52 crc kubenswrapper[4774]: W1121 15:37:52.563314 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd06f2978_f05e_410a_82b2_ef92434f5b93.slice/crio-d9cf34d57879c28c264afc41e06f0673ff477ba38eb2eaa0ce73c3b1da03fdbf WatchSource:0}: Error finding container d9cf34d57879c28c264afc41e06f0673ff477ba38eb2eaa0ce73c3b1da03fdbf: Status 404 returned error can't find the container with id d9cf34d57879c28c264afc41e06f0673ff477ba38eb2eaa0ce73c3b1da03fdbf Nov 21 15:37:53 crc kubenswrapper[4774]: I1121 15:37:53.030268 4774 generic.go:334] "Generic (PLEG): container finished" podID="b10741c8-7c49-45d6-ae01-cb50e0407267" containerID="b046e17a120a2ffc31c0fc4df7eec43593214fd5d3b2e5ecc13499c516b4cbbc" exitCode=0 Nov 21 15:37:53 crc kubenswrapper[4774]: I1121 15:37:53.030347 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-sm9vk" event={"ID":"b10741c8-7c49-45d6-ae01-cb50e0407267","Type":"ContainerDied","Data":"b046e17a120a2ffc31c0fc4df7eec43593214fd5d3b2e5ecc13499c516b4cbbc"} Nov 21 15:37:53 crc kubenswrapper[4774]: I1121 15:37:53.030386 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-sm9vk" event={"ID":"b10741c8-7c49-45d6-ae01-cb50e0407267","Type":"ContainerStarted","Data":"16fb9160b16649c0dbf177338569009afc5f1a520db634b86e778aa608c58da1"} Nov 21 15:37:53 crc kubenswrapper[4774]: I1121 15:37:53.033433 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a3cc-account-create-8698w" event={"ID":"d06f2978-f05e-410a-82b2-ef92434f5b93","Type":"ContainerStarted","Data":"2dca65d009d625af683cb089814d248bffd13cbf441bd14e97a305579256ba13"} Nov 21 15:37:53 crc 
kubenswrapper[4774]: I1121 15:37:53.033478 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a3cc-account-create-8698w" event={"ID":"d06f2978-f05e-410a-82b2-ef92434f5b93","Type":"ContainerStarted","Data":"d9cf34d57879c28c264afc41e06f0673ff477ba38eb2eaa0ce73c3b1da03fdbf"} Nov 21 15:37:53 crc kubenswrapper[4774]: I1121 15:37:53.066225 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-a3cc-account-create-8698w" podStartSLOduration=2.0662047550000002 podStartE2EDuration="2.066204755s" podCreationTimestamp="2025-11-21 15:37:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:37:53.060606756 +0000 UTC m=+5663.712806015" watchObservedRunningTime="2025-11-21 15:37:53.066204755 +0000 UTC m=+5663.718404014" Nov 21 15:37:54 crc kubenswrapper[4774]: I1121 15:37:54.044923 4774 generic.go:334] "Generic (PLEG): container finished" podID="d06f2978-f05e-410a-82b2-ef92434f5b93" containerID="2dca65d009d625af683cb089814d248bffd13cbf441bd14e97a305579256ba13" exitCode=0 Nov 21 15:37:54 crc kubenswrapper[4774]: I1121 15:37:54.045116 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a3cc-account-create-8698w" event={"ID":"d06f2978-f05e-410a-82b2-ef92434f5b93","Type":"ContainerDied","Data":"2dca65d009d625af683cb089814d248bffd13cbf441bd14e97a305579256ba13"} Nov 21 15:37:54 crc kubenswrapper[4774]: I1121 15:37:54.352343 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-sm9vk" Nov 21 15:37:54 crc kubenswrapper[4774]: I1121 15:37:54.499342 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54wjk\" (UniqueName: \"kubernetes.io/projected/b10741c8-7c49-45d6-ae01-cb50e0407267-kube-api-access-54wjk\") pod \"b10741c8-7c49-45d6-ae01-cb50e0407267\" (UID: \"b10741c8-7c49-45d6-ae01-cb50e0407267\") " Nov 21 15:37:54 crc kubenswrapper[4774]: I1121 15:37:54.499605 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b10741c8-7c49-45d6-ae01-cb50e0407267-operator-scripts\") pod \"b10741c8-7c49-45d6-ae01-cb50e0407267\" (UID: \"b10741c8-7c49-45d6-ae01-cb50e0407267\") " Nov 21 15:37:54 crc kubenswrapper[4774]: I1121 15:37:54.500661 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b10741c8-7c49-45d6-ae01-cb50e0407267-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b10741c8-7c49-45d6-ae01-cb50e0407267" (UID: "b10741c8-7c49-45d6-ae01-cb50e0407267"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:37:54 crc kubenswrapper[4774]: I1121 15:37:54.507677 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b10741c8-7c49-45d6-ae01-cb50e0407267-kube-api-access-54wjk" (OuterVolumeSpecName: "kube-api-access-54wjk") pod "b10741c8-7c49-45d6-ae01-cb50e0407267" (UID: "b10741c8-7c49-45d6-ae01-cb50e0407267"). InnerVolumeSpecName "kube-api-access-54wjk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:37:54 crc kubenswrapper[4774]: I1121 15:37:54.601916 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b10741c8-7c49-45d6-ae01-cb50e0407267-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:54 crc kubenswrapper[4774]: I1121 15:37:54.602211 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54wjk\" (UniqueName: \"kubernetes.io/projected/b10741c8-7c49-45d6-ae01-cb50e0407267-kube-api-access-54wjk\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:55 crc kubenswrapper[4774]: I1121 15:37:55.056091 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-sm9vk" Nov 21 15:37:55 crc kubenswrapper[4774]: I1121 15:37:55.056084 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-sm9vk" event={"ID":"b10741c8-7c49-45d6-ae01-cb50e0407267","Type":"ContainerDied","Data":"16fb9160b16649c0dbf177338569009afc5f1a520db634b86e778aa608c58da1"} Nov 21 15:37:55 crc kubenswrapper[4774]: I1121 15:37:55.056138 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16fb9160b16649c0dbf177338569009afc5f1a520db634b86e778aa608c58da1" Nov 21 15:37:55 crc kubenswrapper[4774]: I1121 15:37:55.408338 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a3cc-account-create-8698w" Nov 21 15:37:55 crc kubenswrapper[4774]: I1121 15:37:55.520109 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbr5p\" (UniqueName: \"kubernetes.io/projected/d06f2978-f05e-410a-82b2-ef92434f5b93-kube-api-access-lbr5p\") pod \"d06f2978-f05e-410a-82b2-ef92434f5b93\" (UID: \"d06f2978-f05e-410a-82b2-ef92434f5b93\") " Nov 21 15:37:55 crc kubenswrapper[4774]: I1121 15:37:55.520606 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d06f2978-f05e-410a-82b2-ef92434f5b93-operator-scripts\") pod \"d06f2978-f05e-410a-82b2-ef92434f5b93\" (UID: \"d06f2978-f05e-410a-82b2-ef92434f5b93\") " Nov 21 15:37:55 crc kubenswrapper[4774]: I1121 15:37:55.521476 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d06f2978-f05e-410a-82b2-ef92434f5b93-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d06f2978-f05e-410a-82b2-ef92434f5b93" (UID: "d06f2978-f05e-410a-82b2-ef92434f5b93"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:37:55 crc kubenswrapper[4774]: I1121 15:37:55.524591 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d06f2978-f05e-410a-82b2-ef92434f5b93-kube-api-access-lbr5p" (OuterVolumeSpecName: "kube-api-access-lbr5p") pod "d06f2978-f05e-410a-82b2-ef92434f5b93" (UID: "d06f2978-f05e-410a-82b2-ef92434f5b93"). InnerVolumeSpecName "kube-api-access-lbr5p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:37:55 crc kubenswrapper[4774]: I1121 15:37:55.623278 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d06f2978-f05e-410a-82b2-ef92434f5b93-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:55 crc kubenswrapper[4774]: I1121 15:37:55.623316 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbr5p\" (UniqueName: \"kubernetes.io/projected/d06f2978-f05e-410a-82b2-ef92434f5b93-kube-api-access-lbr5p\") on node \"crc\" DevicePath \"\"" Nov 21 15:37:56 crc kubenswrapper[4774]: I1121 15:37:56.070303 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a3cc-account-create-8698w" event={"ID":"d06f2978-f05e-410a-82b2-ef92434f5b93","Type":"ContainerDied","Data":"d9cf34d57879c28c264afc41e06f0673ff477ba38eb2eaa0ce73c3b1da03fdbf"} Nov 21 15:37:56 crc kubenswrapper[4774]: I1121 15:37:56.070600 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9cf34d57879c28c264afc41e06f0673ff477ba38eb2eaa0ce73c3b1da03fdbf" Nov 21 15:37:56 crc kubenswrapper[4774]: I1121 15:37:56.070376 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a3cc-account-create-8698w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.023065 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-sw56w"] Nov 21 15:37:57 crc kubenswrapper[4774]: E1121 15:37:57.023430 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d06f2978-f05e-410a-82b2-ef92434f5b93" containerName="mariadb-account-create" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.023441 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d06f2978-f05e-410a-82b2-ef92434f5b93" containerName="mariadb-account-create" Nov 21 15:37:57 crc kubenswrapper[4774]: E1121 15:37:57.023483 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b10741c8-7c49-45d6-ae01-cb50e0407267" containerName="mariadb-database-create" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.023492 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b10741c8-7c49-45d6-ae01-cb50e0407267" containerName="mariadb-database-create" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.023665 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b10741c8-7c49-45d6-ae01-cb50e0407267" containerName="mariadb-database-create" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.023704 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="d06f2978-f05e-410a-82b2-ef92434f5b93" containerName="mariadb-account-create" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.024372 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.026947 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-drp7l" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.029500 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.029521 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.046586 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-sw56w"] Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.155794 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-combined-ca-bundle\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.155902 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-etc-machine-id\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.155927 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxlv4\" (UniqueName: \"kubernetes.io/projected/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-kube-api-access-nxlv4\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.155948 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-config-data\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.156163 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-scripts\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.156315 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-db-sync-config-data\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.258882 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-etc-machine-id\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.258785 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-etc-machine-id\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.258999 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxlv4\" (UniqueName: \"kubernetes.io/projected/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-kube-api-access-nxlv4\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.259023 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-config-data\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.259432 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-scripts\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.260010 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-db-sync-config-data\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.260154 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-combined-ca-bundle\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.263022 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-db-sync-config-data\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.271436 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-scripts\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.272962 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-combined-ca-bundle\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.274120 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-config-data\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " 
pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.274617 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxlv4\" (UniqueName: \"kubernetes.io/projected/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-kube-api-access-nxlv4\") pod \"cinder-db-sync-sw56w\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.356803 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sw56w" Nov 21 15:37:57 crc kubenswrapper[4774]: W1121 15:37:57.803613 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50bee0fc_1a29_4ceb_9761_4c9fe4be1a1b.slice/crio-6f68f65ba66b0a3420428c60ebda8224341ae71428825017f37a54226c32e161 WatchSource:0}: Error finding container 6f68f65ba66b0a3420428c60ebda8224341ae71428825017f37a54226c32e161: Status 404 returned error can't find the container with id 6f68f65ba66b0a3420428c60ebda8224341ae71428825017f37a54226c32e161 Nov 21 15:37:57 crc kubenswrapper[4774]: I1121 15:37:57.807571 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-sw56w"] Nov 21 15:37:58 crc kubenswrapper[4774]: I1121 15:37:58.105248 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sw56w" event={"ID":"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b","Type":"ContainerStarted","Data":"6f68f65ba66b0a3420428c60ebda8224341ae71428825017f37a54226c32e161"} Nov 21 15:37:59 crc kubenswrapper[4774]: I1121 15:37:59.112668 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sw56w" event={"ID":"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b","Type":"ContainerStarted","Data":"88e9c7a8b405f5fd6532dec331dd429627d589bb4ff973b3770fb62f3cbc4e76"} Nov 21 15:37:59 crc kubenswrapper[4774]: I1121 15:37:59.132687 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-sw56w" podStartSLOduration=2.132669108 podStartE2EDuration="2.132669108s" podCreationTimestamp="2025-11-21 15:37:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:37:59.130890908 +0000 UTC m=+5669.783090167" watchObservedRunningTime="2025-11-21 15:37:59.132669108 +0000 UTC m=+5669.784868367" Nov 21 15:38:06 crc kubenswrapper[4774]: I1121 15:38:06.177890 4774 generic.go:334] "Generic (PLEG): container finished" podID="50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b" containerID="88e9c7a8b405f5fd6532dec331dd429627d589bb4ff973b3770fb62f3cbc4e76" exitCode=0 Nov 21 15:38:06 crc kubenswrapper[4774]: I1121 15:38:06.178028 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sw56w" event={"ID":"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b","Type":"ContainerDied","Data":"88e9c7a8b405f5fd6532dec331dd429627d589bb4ff973b3770fb62f3cbc4e76"} Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.528169 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-sw56w" Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.590458 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-etc-machine-id\") pod \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.590599 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b" (UID: "50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.590656 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-config-data\") pod \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.591222 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxlv4\" (UniqueName: \"kubernetes.io/projected/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-kube-api-access-nxlv4\") pod \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.591319 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-db-sync-config-data\") pod \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.591428 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-scripts\") pod \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.591496 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-combined-ca-bundle\") pod \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\" (UID: \"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b\") " Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.592248 4774 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.596598 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-kube-api-access-nxlv4" (OuterVolumeSpecName: "kube-api-access-nxlv4") pod "50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b" (UID: "50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b"). InnerVolumeSpecName "kube-api-access-nxlv4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.597081 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b" (UID: "50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.603005 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-scripts" (OuterVolumeSpecName: "scripts") pod "50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b" (UID: "50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.633701 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b" (UID: "50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.645067 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-config-data" (OuterVolumeSpecName: "config-data") pod "50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b" (UID: "50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.694291 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.694559 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxlv4\" (UniqueName: \"kubernetes.io/projected/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-kube-api-access-nxlv4\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.694580 4774 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.694591 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:07 crc kubenswrapper[4774]: I1121 15:38:07.694602 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.200141 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-sw56w" event={"ID":"50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b","Type":"ContainerDied","Data":"6f68f65ba66b0a3420428c60ebda8224341ae71428825017f37a54226c32e161"} Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.200187 4774 pod_container_deletor.go:80] "Container not 
found in pod's containers" containerID="6f68f65ba66b0a3420428c60ebda8224341ae71428825017f37a54226c32e161" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.200253 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-sw56w" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.515940 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6574d9755f-vzbzr"] Nov 21 15:38:08 crc kubenswrapper[4774]: E1121 15:38:08.516733 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b" containerName="cinder-db-sync" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.516777 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b" containerName="cinder-db-sync" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.517028 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b" containerName="cinder-db-sync" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.519300 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.542507 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6574d9755f-vzbzr"] Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.613961 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-ovsdbserver-sb\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.614053 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-dns-svc\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.614088 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-ovsdbserver-nb\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.614115 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-config\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.614145 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhpqt\" (UniqueName: \"kubernetes.io/projected/b3a600c2-6589-4fc3-a96a-2211c34f0c68-kube-api-access-rhpqt\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.641404 4774 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/cinder-api-0"] Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.646021 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.649310 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.649740 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-drp7l" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.649960 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.650324 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.675924 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.715069 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-scripts\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.715128 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr7ws\" (UniqueName: \"kubernetes.io/projected/1c6cb818-68c1-403e-8617-825998ad04ed-kube-api-access-jr7ws\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.715163 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-dns-svc\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.715209 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-ovsdbserver-nb\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.715231 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-config\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.715258 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhpqt\" (UniqueName: \"kubernetes.io/projected/b3a600c2-6589-4fc3-a96a-2211c34f0c68-kube-api-access-rhpqt\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.715286 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/1c6cb818-68c1-403e-8617-825998ad04ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.715327 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-config-data\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.715341 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.715371 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-ovsdbserver-sb\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.715386 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.715401 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c6cb818-68c1-403e-8617-825998ad04ed-logs\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.716143 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-dns-svc\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.716559 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-ovsdbserver-nb\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.716593 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-config\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.717160 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-ovsdbserver-sb\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 
21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.735976 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhpqt\" (UniqueName: \"kubernetes.io/projected/b3a600c2-6589-4fc3-a96a-2211c34f0c68-kube-api-access-rhpqt\") pod \"dnsmasq-dns-6574d9755f-vzbzr\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.817865 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr7ws\" (UniqueName: \"kubernetes.io/projected/1c6cb818-68c1-403e-8617-825998ad04ed-kube-api-access-jr7ws\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.818040 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1c6cb818-68c1-403e-8617-825998ad04ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.818123 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-config-data\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.818161 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1c6cb818-68c1-403e-8617-825998ad04ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.818167 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.818303 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.818334 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c6cb818-68c1-403e-8617-825998ad04ed-logs\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.818404 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-scripts\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.819052 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c6cb818-68c1-403e-8617-825998ad04ed-logs\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 
15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.821339 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-scripts\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.822411 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.827376 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.828131 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-config-data\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.835985 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr7ws\" (UniqueName: \"kubernetes.io/projected/1c6cb818-68c1-403e-8617-825998ad04ed-kube-api-access-jr7ws\") pod \"cinder-api-0\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " pod="openstack/cinder-api-0" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.845283 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:08 crc kubenswrapper[4774]: I1121 15:38:08.971613 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 21 15:38:09 crc kubenswrapper[4774]: I1121 15:38:09.369913 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6574d9755f-vzbzr"] Nov 21 15:38:09 crc kubenswrapper[4774]: I1121 15:38:09.598933 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 21 15:38:09 crc kubenswrapper[4774]: W1121 15:38:09.599380 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c6cb818_68c1_403e_8617_825998ad04ed.slice/crio-1513aa1917764b85a2189802d21b7db1e6f42635a7c1482665ca8c120da6520f WatchSource:0}: Error finding container 1513aa1917764b85a2189802d21b7db1e6f42635a7c1482665ca8c120da6520f: Status 404 returned error can't find the container with id 1513aa1917764b85a2189802d21b7db1e6f42635a7c1482665ca8c120da6520f Nov 21 15:38:10 crc kubenswrapper[4774]: I1121 15:38:10.225742 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1c6cb818-68c1-403e-8617-825998ad04ed","Type":"ContainerStarted","Data":"1513aa1917764b85a2189802d21b7db1e6f42635a7c1482665ca8c120da6520f"} Nov 21 15:38:10 crc kubenswrapper[4774]: I1121 15:38:10.227440 4774 generic.go:334] "Generic (PLEG): container finished" podID="b3a600c2-6589-4fc3-a96a-2211c34f0c68" containerID="ca6ed49cafb923ea60a77da9a1d906fa2b68a9bf1c9ed47c71a243af72f36628" exitCode=0 Nov 21 15:38:10 crc kubenswrapper[4774]: I1121 15:38:10.227473 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" event={"ID":"b3a600c2-6589-4fc3-a96a-2211c34f0c68","Type":"ContainerDied","Data":"ca6ed49cafb923ea60a77da9a1d906fa2b68a9bf1c9ed47c71a243af72f36628"} Nov 21 15:38:10 crc kubenswrapper[4774]: I1121 15:38:10.227494 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" event={"ID":"b3a600c2-6589-4fc3-a96a-2211c34f0c68","Type":"ContainerStarted","Data":"a166851c1733d8ed2e221c4236ce331dd8b03a7afda5747d34c5b5143e7e6619"} Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.237455 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" event={"ID":"b3a600c2-6589-4fc3-a96a-2211c34f0c68","Type":"ContainerStarted","Data":"ab84a8f0e3bf4ddc97f72d6157665cbe2c843e88203e5f9ac91f8776afa67afc"} Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.237970 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.240388 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1c6cb818-68c1-403e-8617-825998ad04ed","Type":"ContainerStarted","Data":"7b9913c1dbedfeb35315bd8c4e930171af39bd6341aa4b26b42b9f943b7c6fcc"} Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.265606 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" podStartSLOduration=3.265581673 podStartE2EDuration="3.265581673s" podCreationTimestamp="2025-11-21 15:38:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:38:11.264639406 +0000 UTC m=+5681.916838665" watchObservedRunningTime="2025-11-21 15:38:11.265581673 +0000 UTC m=+5681.917780932" Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.870184 4774 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/nova-api-0"] Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.870686 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7cf0f33b-88f6-477b-a268-6e1001603327" containerName="nova-api-log" containerID="cri-o://4212129adf2e55f6daeae002e98addf31fc603242658a950c54d2ffae7e04a88" gracePeriod=30 Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.870838 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="7cf0f33b-88f6-477b-a268-6e1001603327" containerName="nova-api-api" containerID="cri-o://6caa185b9bdcaa5b7ffe62e2f54d287b7feeac7c36eee315137b6b4c3eacaa37" gracePeriod=30 Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.877526 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.877733 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="42e7d998-cb9c-42b2-8727-82d1166f7291" containerName="nova-scheduler-scheduler" containerID="cri-o://7b0f53413236437273b291bdcb17f3c75a24d41889d6b532e6a697e2e60d182c" gracePeriod=30 Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.896131 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.896412 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerName="nova-metadata-log" containerID="cri-o://54e23ad3b0355e055e1808222061ed230d33af67576cbf96bb72cc97b47c3d40" gracePeriod=30 Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.896513 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerName="nova-metadata-metadata" containerID="cri-o://30cfcf12060cca9451eeaab0a6ca91843ea566132672af38529e0ceb71b6585c" gracePeriod=30 Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.905329 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.905589 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="a9ab3689-3218-47c5-a72c-7e187b48fc37" containerName="nova-cell0-conductor-conductor" containerID="cri-o://7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b" gracePeriod=30 Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.914106 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Nov 21 15:38:11 crc kubenswrapper[4774]: I1121 15:38:11.914308 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="dc385c54-02aa-4582-924c-3bc67c99b870" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3" gracePeriod=30 Nov 21 15:38:12 crc kubenswrapper[4774]: I1121 15:38:12.257963 4774 generic.go:334] "Generic (PLEG): container finished" podID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerID="54e23ad3b0355e055e1808222061ed230d33af67576cbf96bb72cc97b47c3d40" exitCode=143 Nov 21 15:38:12 crc kubenswrapper[4774]: I1121 15:38:12.258304 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-metadata-0" event={"ID":"b4c11e5c-84cc-44dd-a229-dc16f75f9183","Type":"ContainerDied","Data":"54e23ad3b0355e055e1808222061ed230d33af67576cbf96bb72cc97b47c3d40"} Nov 21 15:38:12 crc kubenswrapper[4774]: I1121 15:38:12.264555 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1c6cb818-68c1-403e-8617-825998ad04ed","Type":"ContainerStarted","Data":"655b784ea30c404876264cfa245b16fc0b846353916e22e09699d26e0c3047af"} Nov 21 15:38:12 crc kubenswrapper[4774]: I1121 15:38:12.264677 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 21 15:38:12 crc kubenswrapper[4774]: I1121 15:38:12.273281 4774 generic.go:334] "Generic (PLEG): container finished" podID="7cf0f33b-88f6-477b-a268-6e1001603327" containerID="4212129adf2e55f6daeae002e98addf31fc603242658a950c54d2ffae7e04a88" exitCode=143 Nov 21 15:38:12 crc kubenswrapper[4774]: I1121 15:38:12.274084 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7cf0f33b-88f6-477b-a268-6e1001603327","Type":"ContainerDied","Data":"4212129adf2e55f6daeae002e98addf31fc603242658a950c54d2ffae7e04a88"} Nov 21 15:38:12 crc kubenswrapper[4774]: I1121 15:38:12.287011 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.28699221 podStartE2EDuration="4.28699221s" podCreationTimestamp="2025-11-21 15:38:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:38:12.282417229 +0000 UTC m=+5682.934616498" watchObservedRunningTime="2025-11-21 15:38:12.28699221 +0000 UTC m=+5682.939191469" Nov 21 15:38:12 crc kubenswrapper[4774]: E1121 15:38:12.434925 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 21 15:38:12 crc kubenswrapper[4774]: E1121 15:38:12.436416 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 21 15:38:12 crc kubenswrapper[4774]: E1121 15:38:12.437612 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 21 15:38:12 crc kubenswrapper[4774]: E1121 15:38:12.437678 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="a9ab3689-3218-47c5-a72c-7e187b48fc37" containerName="nova-cell0-conductor-conductor" Nov 21 15:38:12 crc kubenswrapper[4774]: I1121 15:38:12.853166 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.027965 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc385c54-02aa-4582-924c-3bc67c99b870-combined-ca-bundle\") pod \"dc385c54-02aa-4582-924c-3bc67c99b870\" (UID: \"dc385c54-02aa-4582-924c-3bc67c99b870\") "
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.028093 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc385c54-02aa-4582-924c-3bc67c99b870-config-data\") pod \"dc385c54-02aa-4582-924c-3bc67c99b870\" (UID: \"dc385c54-02aa-4582-924c-3bc67c99b870\") "
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.028215 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86ztl\" (UniqueName: \"kubernetes.io/projected/dc385c54-02aa-4582-924c-3bc67c99b870-kube-api-access-86ztl\") pod \"dc385c54-02aa-4582-924c-3bc67c99b870\" (UID: \"dc385c54-02aa-4582-924c-3bc67c99b870\") "
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.034077 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc385c54-02aa-4582-924c-3bc67c99b870-kube-api-access-86ztl" (OuterVolumeSpecName: "kube-api-access-86ztl") pod "dc385c54-02aa-4582-924c-3bc67c99b870" (UID: "dc385c54-02aa-4582-924c-3bc67c99b870"). InnerVolumeSpecName "kube-api-access-86ztl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.052716 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc385c54-02aa-4582-924c-3bc67c99b870-config-data" (OuterVolumeSpecName: "config-data") pod "dc385c54-02aa-4582-924c-3bc67c99b870" (UID: "dc385c54-02aa-4582-924c-3bc67c99b870"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.056376 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc385c54-02aa-4582-924c-3bc67c99b870-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc385c54-02aa-4582-924c-3bc67c99b870" (UID: "dc385c54-02aa-4582-924c-3bc67c99b870"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.130567 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc385c54-02aa-4582-924c-3bc67c99b870-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.130623 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86ztl\" (UniqueName: \"kubernetes.io/projected/dc385c54-02aa-4582-924c-3bc67c99b870-kube-api-access-86ztl\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.130644 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc385c54-02aa-4582-924c-3bc67c99b870-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.283659 4774 generic.go:334] "Generic (PLEG): container finished" podID="dc385c54-02aa-4582-924c-3bc67c99b870" containerID="88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3" exitCode=0
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.283727 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.283740 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dc385c54-02aa-4582-924c-3bc67c99b870","Type":"ContainerDied","Data":"88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3"}
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.283807 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"dc385c54-02aa-4582-924c-3bc67c99b870","Type":"ContainerDied","Data":"bac642411104fa859b55c4addd7867d551605da45f3cd24e850f5f41b64bb167"}
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.283868 4774 scope.go:117] "RemoveContainer" containerID="88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.307810 4774 scope.go:117] "RemoveContainer" containerID="88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3"
Nov 21 15:38:13 crc kubenswrapper[4774]: E1121 15:38:13.308346 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3\": container with ID starting with 88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3 not found: ID does not exist" containerID="88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.308418 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3"} err="failed to get container status \"88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3\": rpc error: code = NotFound desc = could not find container \"88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3\": container with ID starting with 88e1975f51c9c0772aed568463574c1e1b8aa13d7e4a67c1f74acf167ff420c3 not found: ID does not exist"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.315412 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.322268 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.343389 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Nov 21 15:38:13 crc kubenswrapper[4774]: E1121 15:38:13.343786 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc385c54-02aa-4582-924c-3bc67c99b870" containerName="nova-cell1-novncproxy-novncproxy"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.343804 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc385c54-02aa-4582-924c-3bc67c99b870" containerName="nova-cell1-novncproxy-novncproxy"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.344024 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc385c54-02aa-4582-924c-3bc67c99b870" containerName="nova-cell1-novncproxy-novncproxy"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.344606 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.346201 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.358790 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.435974 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fadb611f-7092-4459-8b6a-3aeba1e8a7ac-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fadb611f-7092-4459-8b6a-3aeba1e8a7ac\") " pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.436339 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fadb611f-7092-4459-8b6a-3aeba1e8a7ac-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fadb611f-7092-4459-8b6a-3aeba1e8a7ac\") " pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.436410 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv9qt\" (UniqueName: \"kubernetes.io/projected/fadb611f-7092-4459-8b6a-3aeba1e8a7ac-kube-api-access-kv9qt\") pod \"nova-cell1-novncproxy-0\" (UID: \"fadb611f-7092-4459-8b6a-3aeba1e8a7ac\") " pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.538505 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fadb611f-7092-4459-8b6a-3aeba1e8a7ac-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fadb611f-7092-4459-8b6a-3aeba1e8a7ac\") " pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.541528 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fadb611f-7092-4459-8b6a-3aeba1e8a7ac-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fadb611f-7092-4459-8b6a-3aeba1e8a7ac\") " pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.541644 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv9qt\" (UniqueName: \"kubernetes.io/projected/fadb611f-7092-4459-8b6a-3aeba1e8a7ac-kube-api-access-kv9qt\") pod \"nova-cell1-novncproxy-0\" (UID: \"fadb611f-7092-4459-8b6a-3aeba1e8a7ac\") " pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.545464 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fadb611f-7092-4459-8b6a-3aeba1e8a7ac-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fadb611f-7092-4459-8b6a-3aeba1e8a7ac\") " pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.551114 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fadb611f-7092-4459-8b6a-3aeba1e8a7ac-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fadb611f-7092-4459-8b6a-3aeba1e8a7ac\") " pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.564420 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv9qt\" (UniqueName: \"kubernetes.io/projected/fadb611f-7092-4459-8b6a-3aeba1e8a7ac-kube-api-access-kv9qt\") pod \"nova-cell1-novncproxy-0\" (UID: \"fadb611f-7092-4459-8b6a-3aeba1e8a7ac\") " pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:13 crc kubenswrapper[4774]: I1121 15:38:13.669426 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.124996 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc385c54-02aa-4582-924c-3bc67c99b870" path="/var/lib/kubelet/pods/dc385c54-02aa-4582-924c-3bc67c99b870/volumes"
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.128328 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.296972 4774 generic.go:334] "Generic (PLEG): container finished" podID="42e7d998-cb9c-42b2-8727-82d1166f7291" containerID="7b0f53413236437273b291bdcb17f3c75a24d41889d6b532e6a697e2e60d182c" exitCode=0
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.297056 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"42e7d998-cb9c-42b2-8727-82d1166f7291","Type":"ContainerDied","Data":"7b0f53413236437273b291bdcb17f3c75a24d41889d6b532e6a697e2e60d182c"}
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.298043 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"42e7d998-cb9c-42b2-8727-82d1166f7291","Type":"ContainerDied","Data":"d217f37bba305d19c7b6e05abd4901effb9f689ac6ef9569afb2adc3991447f1"}
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.298112 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d217f37bba305d19c7b6e05abd4901effb9f689ac6ef9569afb2adc3991447f1"
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.298660 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.302540 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fadb611f-7092-4459-8b6a-3aeba1e8a7ac","Type":"ContainerStarted","Data":"6eb4ad51818cf635daeaaf2915d55a1ea2f8ea35feae19a2f59069c24b131af5"}
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.463582 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwj7l\" (UniqueName: \"kubernetes.io/projected/42e7d998-cb9c-42b2-8727-82d1166f7291-kube-api-access-gwj7l\") pod \"42e7d998-cb9c-42b2-8727-82d1166f7291\" (UID: \"42e7d998-cb9c-42b2-8727-82d1166f7291\") "
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.463757 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42e7d998-cb9c-42b2-8727-82d1166f7291-combined-ca-bundle\") pod \"42e7d998-cb9c-42b2-8727-82d1166f7291\" (UID: \"42e7d998-cb9c-42b2-8727-82d1166f7291\") "
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.463801 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42e7d998-cb9c-42b2-8727-82d1166f7291-config-data\") pod \"42e7d998-cb9c-42b2-8727-82d1166f7291\" (UID: \"42e7d998-cb9c-42b2-8727-82d1166f7291\") "
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.469762 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42e7d998-cb9c-42b2-8727-82d1166f7291-kube-api-access-gwj7l" (OuterVolumeSpecName: "kube-api-access-gwj7l") pod "42e7d998-cb9c-42b2-8727-82d1166f7291" (UID: "42e7d998-cb9c-42b2-8727-82d1166f7291"). InnerVolumeSpecName "kube-api-access-gwj7l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.490453 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42e7d998-cb9c-42b2-8727-82d1166f7291-config-data" (OuterVolumeSpecName: "config-data") pod "42e7d998-cb9c-42b2-8727-82d1166f7291" (UID: "42e7d998-cb9c-42b2-8727-82d1166f7291"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.498736 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42e7d998-cb9c-42b2-8727-82d1166f7291-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "42e7d998-cb9c-42b2-8727-82d1166f7291" (UID: "42e7d998-cb9c-42b2-8727-82d1166f7291"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.566370 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwj7l\" (UniqueName: \"kubernetes.io/projected/42e7d998-cb9c-42b2-8727-82d1166f7291-kube-api-access-gwj7l\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.566458 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42e7d998-cb9c-42b2-8727-82d1166f7291-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:14 crc kubenswrapper[4774]: I1121 15:38:14.566474 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42e7d998-cb9c-42b2-8727-82d1166f7291-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.054381 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.72:8775/\": read tcp 10.217.0.2:46956->10.217.1.72:8775: read: connection reset by peer"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.054397 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.72:8775/\": read tcp 10.217.0.2:46942->10.217.1.72:8775: read: connection reset by peer"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.097701 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.097917 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="996312d6-6bfd-47a2-83a4-d43364658f94" containerName="nova-cell1-conductor-conductor" containerID="cri-o://d57481469ee63be2532071a8c3856420f643f85b1c91fe469f6acf85573d98c5" gracePeriod=30
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.314439 4774 generic.go:334] "Generic (PLEG): container finished" podID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerID="30cfcf12060cca9451eeaab0a6ca91843ea566132672af38529e0ceb71b6585c" exitCode=0
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.314495 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4c11e5c-84cc-44dd-a229-dc16f75f9183","Type":"ContainerDied","Data":"30cfcf12060cca9451eeaab0a6ca91843ea566132672af38529e0ceb71b6585c"}
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.325064 4774 generic.go:334] "Generic (PLEG): container finished" podID="7cf0f33b-88f6-477b-a268-6e1001603327" containerID="6caa185b9bdcaa5b7ffe62e2f54d287b7feeac7c36eee315137b6b4c3eacaa37" exitCode=0
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.325144 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7cf0f33b-88f6-477b-a268-6e1001603327","Type":"ContainerDied","Data":"6caa185b9bdcaa5b7ffe62e2f54d287b7feeac7c36eee315137b6b4c3eacaa37"}
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.326955 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.328860 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"fadb611f-7092-4459-8b6a-3aeba1e8a7ac","Type":"ContainerStarted","Data":"fcffad3dae1c950291c441e19cf4557e97c2ce3da9698d574c56c538f7301bea"}
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.346578 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.346560176 podStartE2EDuration="2.346560176s" podCreationTimestamp="2025-11-21 15:38:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:38:15.341355158 +0000 UTC m=+5685.993554427" watchObservedRunningTime="2025-11-21 15:38:15.346560176 +0000 UTC m=+5685.998759435"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.375197 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.388619 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.396930 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Nov 21 15:38:15 crc kubenswrapper[4774]: E1121 15:38:15.397540 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42e7d998-cb9c-42b2-8727-82d1166f7291" containerName="nova-scheduler-scheduler"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.397569 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="42e7d998-cb9c-42b2-8727-82d1166f7291" containerName="nova-scheduler-scheduler"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.397859 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="42e7d998-cb9c-42b2-8727-82d1166f7291" containerName="nova-scheduler-scheduler"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.398713 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.401072 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.407009 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.586015 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19e323d5-4bb6-4769-aab0-fe396014cc08-config-data\") pod \"nova-scheduler-0\" (UID: \"19e323d5-4bb6-4769-aab0-fe396014cc08\") " pod="openstack/nova-scheduler-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.586209 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lww96\" (UniqueName: \"kubernetes.io/projected/19e323d5-4bb6-4769-aab0-fe396014cc08-kube-api-access-lww96\") pod \"nova-scheduler-0\" (UID: \"19e323d5-4bb6-4769-aab0-fe396014cc08\") " pod="openstack/nova-scheduler-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.586306 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19e323d5-4bb6-4769-aab0-fe396014cc08-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"19e323d5-4bb6-4769-aab0-fe396014cc08\") " pod="openstack/nova-scheduler-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.633805 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.639429 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.689138 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19e323d5-4bb6-4769-aab0-fe396014cc08-config-data\") pod \"nova-scheduler-0\" (UID: \"19e323d5-4bb6-4769-aab0-fe396014cc08\") " pod="openstack/nova-scheduler-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.689275 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lww96\" (UniqueName: \"kubernetes.io/projected/19e323d5-4bb6-4769-aab0-fe396014cc08-kube-api-access-lww96\") pod \"nova-scheduler-0\" (UID: \"19e323d5-4bb6-4769-aab0-fe396014cc08\") " pod="openstack/nova-scheduler-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.689329 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19e323d5-4bb6-4769-aab0-fe396014cc08-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"19e323d5-4bb6-4769-aab0-fe396014cc08\") " pod="openstack/nova-scheduler-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.695297 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19e323d5-4bb6-4769-aab0-fe396014cc08-config-data\") pod \"nova-scheduler-0\" (UID: \"19e323d5-4bb6-4769-aab0-fe396014cc08\") " pod="openstack/nova-scheduler-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.708982 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lww96\" (UniqueName: \"kubernetes.io/projected/19e323d5-4bb6-4769-aab0-fe396014cc08-kube-api-access-lww96\") pod \"nova-scheduler-0\" (UID: \"19e323d5-4bb6-4769-aab0-fe396014cc08\") " pod="openstack/nova-scheduler-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.717017 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19e323d5-4bb6-4769-aab0-fe396014cc08-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"19e323d5-4bb6-4769-aab0-fe396014cc08\") " pod="openstack/nova-scheduler-0"
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.791242 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cf0f33b-88f6-477b-a268-6e1001603327-combined-ca-bundle\") pod \"7cf0f33b-88f6-477b-a268-6e1001603327\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") "
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.791310 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cf0f33b-88f6-477b-a268-6e1001603327-config-data\") pod \"7cf0f33b-88f6-477b-a268-6e1001603327\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") "
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.791388 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4c11e5c-84cc-44dd-a229-dc16f75f9183-logs\") pod \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") "
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.791474 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4c11e5c-84cc-44dd-a229-dc16f75f9183-combined-ca-bundle\") pod \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") "
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.791564 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pszxv\" (UniqueName: \"kubernetes.io/projected/7cf0f33b-88f6-477b-a268-6e1001603327-kube-api-access-pszxv\") pod \"7cf0f33b-88f6-477b-a268-6e1001603327\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") "
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.791595 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4c11e5c-84cc-44dd-a229-dc16f75f9183-config-data\") pod \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") "
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.791683 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dgnl\" (UniqueName: \"kubernetes.io/projected/b4c11e5c-84cc-44dd-a229-dc16f75f9183-kube-api-access-2dgnl\") pod \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\" (UID: \"b4c11e5c-84cc-44dd-a229-dc16f75f9183\") "
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.791718 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cf0f33b-88f6-477b-a268-6e1001603327-logs\") pod \"7cf0f33b-88f6-477b-a268-6e1001603327\" (UID: \"7cf0f33b-88f6-477b-a268-6e1001603327\") "
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.792082 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4c11e5c-84cc-44dd-a229-dc16f75f9183-logs" (OuterVolumeSpecName: "logs") pod "b4c11e5c-84cc-44dd-a229-dc16f75f9183" (UID: "b4c11e5c-84cc-44dd-a229-dc16f75f9183"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.792501 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cf0f33b-88f6-477b-a268-6e1001603327-logs" (OuterVolumeSpecName: "logs") pod "7cf0f33b-88f6-477b-a268-6e1001603327" (UID: "7cf0f33b-88f6-477b-a268-6e1001603327"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.792520 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b4c11e5c-84cc-44dd-a229-dc16f75f9183-logs\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.805418 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4c11e5c-84cc-44dd-a229-dc16f75f9183-kube-api-access-2dgnl" (OuterVolumeSpecName: "kube-api-access-2dgnl") pod "b4c11e5c-84cc-44dd-a229-dc16f75f9183" (UID: "b4c11e5c-84cc-44dd-a229-dc16f75f9183"). InnerVolumeSpecName "kube-api-access-2dgnl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.809131 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cf0f33b-88f6-477b-a268-6e1001603327-kube-api-access-pszxv" (OuterVolumeSpecName: "kube-api-access-pszxv") pod "7cf0f33b-88f6-477b-a268-6e1001603327" (UID: "7cf0f33b-88f6-477b-a268-6e1001603327"). InnerVolumeSpecName "kube-api-access-pszxv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.821544 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cf0f33b-88f6-477b-a268-6e1001603327-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7cf0f33b-88f6-477b-a268-6e1001603327" (UID: "7cf0f33b-88f6-477b-a268-6e1001603327"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.823280 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4c11e5c-84cc-44dd-a229-dc16f75f9183-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4c11e5c-84cc-44dd-a229-dc16f75f9183" (UID: "b4c11e5c-84cc-44dd-a229-dc16f75f9183"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.835388 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4c11e5c-84cc-44dd-a229-dc16f75f9183-config-data" (OuterVolumeSpecName: "config-data") pod "b4c11e5c-84cc-44dd-a229-dc16f75f9183" (UID: "b4c11e5c-84cc-44dd-a229-dc16f75f9183"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.850667 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cf0f33b-88f6-477b-a268-6e1001603327-config-data" (OuterVolumeSpecName: "config-data") pod "7cf0f33b-88f6-477b-a268-6e1001603327" (UID: "7cf0f33b-88f6-477b-a268-6e1001603327"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.894329 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4c11e5c-84cc-44dd-a229-dc16f75f9183-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.894373 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pszxv\" (UniqueName: \"kubernetes.io/projected/7cf0f33b-88f6-477b-a268-6e1001603327-kube-api-access-pszxv\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.894390 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4c11e5c-84cc-44dd-a229-dc16f75f9183-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.894402 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dgnl\" (UniqueName: \"kubernetes.io/projected/b4c11e5c-84cc-44dd-a229-dc16f75f9183-kube-api-access-2dgnl\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.894417 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7cf0f33b-88f6-477b-a268-6e1001603327-logs\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.894427 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7cf0f33b-88f6-477b-a268-6e1001603327-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:15 crc kubenswrapper[4774]: I1121 15:38:15.894436 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7cf0f33b-88f6-477b-a268-6e1001603327-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:15 crc kubenswrapper[4774]: E1121 15:38:15.999547 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d57481469ee63be2532071a8c3856420f643f85b1c91fe469f6acf85573d98c5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 21 15:38:16 crc kubenswrapper[4774]: E1121 15:38:16.000974 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d57481469ee63be2532071a8c3856420f643f85b1c91fe469f6acf85573d98c5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 21 15:38:16 crc kubenswrapper[4774]: E1121 15:38:16.002364 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d57481469ee63be2532071a8c3856420f643f85b1c91fe469f6acf85573d98c5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 21 15:38:16 crc kubenswrapper[4774]: E1121 15:38:16.002421 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="996312d6-6bfd-47a2-83a4-d43364658f94" containerName="nova-cell1-conductor-conductor"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.014228 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.111516 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42e7d998-cb9c-42b2-8727-82d1166f7291" path="/var/lib/kubelet/pods/42e7d998-cb9c-42b2-8727-82d1166f7291/volumes"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.336960 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"7cf0f33b-88f6-477b-a268-6e1001603327","Type":"ContainerDied","Data":"a2aaaa7ce57c97c27356c9322708d7b7c952eed67cb3f4c0b83a592db6963cbd"}
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.337254 4774 scope.go:117] "RemoveContainer" containerID="6caa185b9bdcaa5b7ffe62e2f54d287b7feeac7c36eee315137b6b4c3eacaa37"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.336983 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.340444 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b4c11e5c-84cc-44dd-a229-dc16f75f9183","Type":"ContainerDied","Data":"7befa88788d91d656f473134ccf36ac181e628957768f974d7a777e70e5fc262"}
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.340493 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.370550 4774 scope.go:117] "RemoveContainer" containerID="4212129adf2e55f6daeae002e98addf31fc603242658a950c54d2ffae7e04a88"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.375792 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.390087 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.412000 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.425113 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.432229 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Nov 21 15:38:16 crc kubenswrapper[4774]: E1121 15:38:16.432663 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cf0f33b-88f6-477b-a268-6e1001603327" containerName="nova-api-api"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.432679 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cf0f33b-88f6-477b-a268-6e1001603327" containerName="nova-api-api"
Nov 21 15:38:16 crc kubenswrapper[4774]: E1121 15:38:16.432696 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerName="nova-metadata-metadata"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.432704 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerName="nova-metadata-metadata"
Nov 21 15:38:16 crc kubenswrapper[4774]: E1121 15:38:16.432728 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cf0f33b-88f6-477b-a268-6e1001603327" containerName="nova-api-log"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.432734 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cf0f33b-88f6-477b-a268-6e1001603327" containerName="nova-api-log"
Nov 21 15:38:16 crc kubenswrapper[4774]: E1121 15:38:16.432754 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerName="nova-metadata-log"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.432760 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerName="nova-metadata-log"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.432945 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cf0f33b-88f6-477b-a268-6e1001603327" containerName="nova-api-api"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.432964 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerName="nova-metadata-metadata"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.432977 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" containerName="nova-metadata-log"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.432988 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cf0f33b-88f6-477b-a268-6e1001603327" containerName="nova-api-log"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.433999 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.437004 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.440866 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.442357 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.446841 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.453670 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.465884 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.506526 4774 scope.go:117] "RemoveContainer" containerID="30cfcf12060cca9451eeaab0a6ca91843ea566132672af38529e0ceb71b6585c"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.544420 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.553883 4774 scope.go:117] "RemoveContainer" containerID="54e23ad3b0355e055e1808222061ed230d33af67576cbf96bb72cc97b47c3d40"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.613961 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5978983d-ed01-4414-a4ac-bd04b249957b-logs\") pod \"nova-metadata-0\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.614332 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21fde3a8-382a-42fe-863a-2c02cb7ccc90-config-data\") pod \"nova-api-0\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.614410 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72jgh\" (UniqueName: \"kubernetes.io/projected/5978983d-ed01-4414-a4ac-bd04b249957b-kube-api-access-72jgh\") pod \"nova-metadata-0\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.614438 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21fde3a8-382a-42fe-863a-2c02cb7ccc90-logs\") pod \"nova-api-0\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.614598 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5978983d-ed01-4414-a4ac-bd04b249957b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.614755 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21fde3a8-382a-42fe-863a-2c02cb7ccc90-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.614839 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm8q7\" (UniqueName: \"kubernetes.io/projected/21fde3a8-382a-42fe-863a-2c02cb7ccc90-kube-api-access-zm8q7\") pod \"nova-api-0\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.614877 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5978983d-ed01-4414-a4ac-bd04b249957b-config-data\") pod \"nova-metadata-0\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.717496 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5978983d-ed01-4414-a4ac-bd04b249957b-logs\") pod \"nova-metadata-0\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.717631 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21fde3a8-382a-42fe-863a-2c02cb7ccc90-config-data\") pod \"nova-api-0\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.717758 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72jgh\" (UniqueName: \"kubernetes.io/projected/5978983d-ed01-4414-a4ac-bd04b249957b-kube-api-access-72jgh\") pod \"nova-metadata-0\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.717805 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21fde3a8-382a-42fe-863a-2c02cb7ccc90-logs\") pod \"nova-api-0\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.721150 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5978983d-ed01-4414-a4ac-bd04b249957b-logs\") pod \"nova-metadata-0\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.720802 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21fde3a8-382a-42fe-863a-2c02cb7ccc90-logs\") pod \"nova-api-0\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.721239 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5978983d-ed01-4414-a4ac-bd04b249957b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.722061 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21fde3a8-382a-42fe-863a-2c02cb7ccc90-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.722860 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm8q7\" (UniqueName: \"kubernetes.io/projected/21fde3a8-382a-42fe-863a-2c02cb7ccc90-kube-api-access-zm8q7\") pod \"nova-api-0\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.722913 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5978983d-ed01-4414-a4ac-bd04b249957b-config-data\") pod \"nova-metadata-0\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.727539 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21fde3a8-382a-42fe-863a-2c02cb7ccc90-config-data\") pod \"nova-api-0\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.727563 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21fde3a8-382a-42fe-863a-2c02cb7ccc90-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.728320 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5978983d-ed01-4414-a4ac-bd04b249957b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.729594 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5978983d-ed01-4414-a4ac-bd04b249957b-config-data\") pod \"nova-metadata-0\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.739060 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72jgh\" (UniqueName: \"kubernetes.io/projected/5978983d-ed01-4414-a4ac-bd04b249957b-kube-api-access-72jgh\") pod \"nova-metadata-0\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " pod="openstack/nova-metadata-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.744077 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zm8q7\" (UniqueName: \"kubernetes.io/projected/21fde3a8-382a-42fe-863a-2c02cb7ccc90-kube-api-access-zm8q7\") pod \"nova-api-0\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.817324 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Nov 21 15:38:16 crc kubenswrapper[4774]: I1121 15:38:16.831449 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.303405 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.347345 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.357924 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"19e323d5-4bb6-4769-aab0-fe396014cc08","Type":"ContainerStarted","Data":"080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba"}
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.357969 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"19e323d5-4bb6-4769-aab0-fe396014cc08","Type":"ContainerStarted","Data":"6aa0c5ea12eb4cbc6504e07c0e24e9a2aa448c1b6c268cb945b7411d0b9d91a9"}
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.372733 4774 generic.go:334] "Generic (PLEG): container finished" podID="a9ab3689-3218-47c5-a72c-7e187b48fc37" containerID="7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b" exitCode=0
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.372795 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a9ab3689-3218-47c5-a72c-7e187b48fc37","Type":"ContainerDied","Data":"7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b"}
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.377420 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"21fde3a8-382a-42fe-863a-2c02cb7ccc90","Type":"ContainerStarted","Data":"4ad7636096d84c190b3275405d0126fd4a40f4a23a4775c0eb6d7c073c3a1b7d"}
Nov 21 15:38:17 crc kubenswrapper[4774]: W1121 15:38:17.378808 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5978983d_ed01_4414_a4ac_bd04b249957b.slice/crio-7791cceee7af484b3d3d1b74ab4e94e05edfe0aba5156fc54188ad7b284cca16 WatchSource:0}: Error finding container 7791cceee7af484b3d3d1b74ab4e94e05edfe0aba5156fc54188ad7b284cca16: Status 404 returned error can't find the container with id 7791cceee7af484b3d3d1b74ab4e94e05edfe0aba5156fc54188ad7b284cca16
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.387367 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.387349951 podStartE2EDuration="2.387349951s" podCreationTimestamp="2025-11-21 15:38:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:38:17.38064778 +0000 UTC m=+5688.032847039" watchObservedRunningTime="2025-11-21 15:38:17.387349951 +0000 UTC m=+5688.039549210"
Nov 21 15:38:17 crc kubenswrapper[4774]: E1121 15:38:17.434597 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b is running failed: container process not found" containerID="7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 21 15:38:17 crc kubenswrapper[4774]: E1121 15:38:17.437083 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b is running failed: container process not found" containerID="7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 21 15:38:17 crc kubenswrapper[4774]: E1121 15:38:17.437543 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b is running failed: container process not found" containerID="7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Nov 21 15:38:17 crc kubenswrapper[4774]: E1121 15:38:17.437600 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b is running failed: container process not found" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="a9ab3689-3218-47c5-a72c-7e187b48fc37" containerName="nova-cell0-conductor-conductor"
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.526164 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.637372 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6fv8\" (UniqueName: \"kubernetes.io/projected/a9ab3689-3218-47c5-a72c-7e187b48fc37-kube-api-access-s6fv8\") pod \"a9ab3689-3218-47c5-a72c-7e187b48fc37\" (UID: \"a9ab3689-3218-47c5-a72c-7e187b48fc37\") "
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.637469 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ab3689-3218-47c5-a72c-7e187b48fc37-combined-ca-bundle\") pod \"a9ab3689-3218-47c5-a72c-7e187b48fc37\" (UID: \"a9ab3689-3218-47c5-a72c-7e187b48fc37\") "
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.637505 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9ab3689-3218-47c5-a72c-7e187b48fc37-config-data\") pod \"a9ab3689-3218-47c5-a72c-7e187b48fc37\" (UID: \"a9ab3689-3218-47c5-a72c-7e187b48fc37\") "
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.647765 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9ab3689-3218-47c5-a72c-7e187b48fc37-kube-api-access-s6fv8" (OuterVolumeSpecName: "kube-api-access-s6fv8") pod "a9ab3689-3218-47c5-a72c-7e187b48fc37" (UID: "a9ab3689-3218-47c5-a72c-7e187b48fc37"). InnerVolumeSpecName "kube-api-access-s6fv8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.665118 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9ab3689-3218-47c5-a72c-7e187b48fc37-config-data" (OuterVolumeSpecName: "config-data") pod "a9ab3689-3218-47c5-a72c-7e187b48fc37" (UID: "a9ab3689-3218-47c5-a72c-7e187b48fc37"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.671079 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9ab3689-3218-47c5-a72c-7e187b48fc37-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9ab3689-3218-47c5-a72c-7e187b48fc37" (UID: "a9ab3689-3218-47c5-a72c-7e187b48fc37"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.739883 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6fv8\" (UniqueName: \"kubernetes.io/projected/a9ab3689-3218-47c5-a72c-7e187b48fc37-kube-api-access-s6fv8\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.739916 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ab3689-3218-47c5-a72c-7e187b48fc37-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:17 crc kubenswrapper[4774]: I1121 15:38:17.739925 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9ab3689-3218-47c5-a72c-7e187b48fc37-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.106583 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cf0f33b-88f6-477b-a268-6e1001603327" path="/var/lib/kubelet/pods/7cf0f33b-88f6-477b-a268-6e1001603327/volumes"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.107208 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4c11e5c-84cc-44dd-a229-dc16f75f9183" path="/var/lib/kubelet/pods/b4c11e5c-84cc-44dd-a229-dc16f75f9183/volumes"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.386497 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5978983d-ed01-4414-a4ac-bd04b249957b","Type":"ContainerStarted","Data":"9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af5"}
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.386703 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5978983d-ed01-4414-a4ac-bd04b249957b","Type":"ContainerStarted","Data":"7791cceee7af484b3d3d1b74ab4e94e05edfe0aba5156fc54188ad7b284cca16"}
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.387717 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a9ab3689-3218-47c5-a72c-7e187b48fc37","Type":"ContainerDied","Data":"3b5d0119d9b19c292495a4e1650291fe70302eedf253883f4ed1871fd1f906fc"}
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.387742 4774 scope.go:117] "RemoveContainer" containerID="7ef2235461a651834b6eacb02362d2370d4b00141c0babb9b06eb5b43cf3f73b"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.387840 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.415519 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"21fde3a8-382a-42fe-863a-2c02cb7ccc90","Type":"ContainerStarted","Data":"68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e"}
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.424592 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.442799 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.460469 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 21 15:38:18 crc kubenswrapper[4774]: E1121 15:38:18.460896 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ab3689-3218-47c5-a72c-7e187b48fc37" containerName="nova-cell0-conductor-conductor"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.460914 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ab3689-3218-47c5-a72c-7e187b48fc37" containerName="nova-cell0-conductor-conductor"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.461085 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9ab3689-3218-47c5-a72c-7e187b48fc37" containerName="nova-cell0-conductor-conductor"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.461672 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.469793 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.478666 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.556992 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce1c2fe-593d-489c-a4ee-79b2be128d8b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\") " pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.557129 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nxw9\" (UniqueName: \"kubernetes.io/projected/fce1c2fe-593d-489c-a4ee-79b2be128d8b-kube-api-access-4nxw9\") pod \"nova-cell0-conductor-0\" (UID: \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\") " pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.557385 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fce1c2fe-593d-489c-a4ee-79b2be128d8b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\") " pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.658792 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce1c2fe-593d-489c-a4ee-79b2be128d8b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\") " pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.658915 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nxw9\" (UniqueName: \"kubernetes.io/projected/fce1c2fe-593d-489c-a4ee-79b2be128d8b-kube-api-access-4nxw9\") pod \"nova-cell0-conductor-0\" (UID: \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\") " pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.658960 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fce1c2fe-593d-489c-a4ee-79b2be128d8b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\") " pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.669992 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.674169 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce1c2fe-593d-489c-a4ee-79b2be128d8b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\") " pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.680433 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fce1c2fe-593d-489c-a4ee-79b2be128d8b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\") " pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.686459 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nxw9\" (UniqueName: \"kubernetes.io/projected/fce1c2fe-593d-489c-a4ee-79b2be128d8b-kube-api-access-4nxw9\") pod \"nova-cell0-conductor-0\" (UID: \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\") " pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.780348 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.847017 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr"
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.918380 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c9f988f55-pw27d"]
Nov 21 15:38:18 crc kubenswrapper[4774]: I1121 15:38:18.918659 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" podUID="859d895c-32e3-412e-89d7-ee321c45a7fe" containerName="dnsmasq-dns" containerID="cri-o://bcc91eca8a5e0bba0e5e56e8d5e4b1a8e2ed67fdec89e6d3fcce007474476c2d" gracePeriod=10
Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.296154 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.453460 4774 generic.go:334] "Generic (PLEG): container finished" podID="859d895c-32e3-412e-89d7-ee321c45a7fe" containerID="bcc91eca8a5e0bba0e5e56e8d5e4b1a8e2ed67fdec89e6d3fcce007474476c2d" exitCode=0
Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.453578 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" event={"ID":"859d895c-32e3-412e-89d7-ee321c45a7fe","Type":"ContainerDied","Data":"bcc91eca8a5e0bba0e5e56e8d5e4b1a8e2ed67fdec89e6d3fcce007474476c2d"}
Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.453613 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" event={"ID":"859d895c-32e3-412e-89d7-ee321c45a7fe","Type":"ContainerDied","Data":"36b8eae4a84fc612cba5fad682ee918b405babf4fda648c2361738a38283a664"}
Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.453625 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36b8eae4a84fc612cba5fad682ee918b405babf4fda648c2361738a38283a664"
Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.486463 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5978983d-ed01-4414-a4ac-bd04b249957b","Type":"ContainerStarted","Data":"5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a"}
Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.522606 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.52258909 podStartE2EDuration="3.52258909s" podCreationTimestamp="2025-11-21 15:38:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:38:19.520007276 +0000 UTC m=+5690.172206535" watchObservedRunningTime="2025-11-21 15:38:19.52258909 +0000 UTC m=+5690.174788349"
Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.534187 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fce1c2fe-593d-489c-a4ee-79b2be128d8b","Type":"ContainerStarted","Data":"6cfdad9a8631811badec4d377f5becae5ddcefba9d57ff031efd18d06bebe541"}
Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.541468 4774 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.555099 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"21fde3a8-382a-42fe-863a-2c02cb7ccc90","Type":"ContainerStarted","Data":"47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0"} Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.634154 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.63413526 podStartE2EDuration="3.63413526s" podCreationTimestamp="2025-11-21 15:38:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:38:19.611416352 +0000 UTC m=+5690.263615601" watchObservedRunningTime="2025-11-21 15:38:19.63413526 +0000 UTC m=+5690.286334519" Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.704695 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-ovsdbserver-sb\") pod \"859d895c-32e3-412e-89d7-ee321c45a7fe\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.704748 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-ovsdbserver-nb\") pod \"859d895c-32e3-412e-89d7-ee321c45a7fe\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.704782 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-config\") pod \"859d895c-32e3-412e-89d7-ee321c45a7fe\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.704865 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-src47\" (UniqueName: \"kubernetes.io/projected/859d895c-32e3-412e-89d7-ee321c45a7fe-kube-api-access-src47\") pod \"859d895c-32e3-412e-89d7-ee321c45a7fe\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.704904 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-dns-svc\") pod \"859d895c-32e3-412e-89d7-ee321c45a7fe\" (UID: \"859d895c-32e3-412e-89d7-ee321c45a7fe\") " Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.715530 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/859d895c-32e3-412e-89d7-ee321c45a7fe-kube-api-access-src47" (OuterVolumeSpecName: "kube-api-access-src47") pod "859d895c-32e3-412e-89d7-ee321c45a7fe" (UID: "859d895c-32e3-412e-89d7-ee321c45a7fe"). InnerVolumeSpecName "kube-api-access-src47". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.779981 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "859d895c-32e3-412e-89d7-ee321c45a7fe" (UID: "859d895c-32e3-412e-89d7-ee321c45a7fe"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.793335 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "859d895c-32e3-412e-89d7-ee321c45a7fe" (UID: "859d895c-32e3-412e-89d7-ee321c45a7fe"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.799267 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-config" (OuterVolumeSpecName: "config") pod "859d895c-32e3-412e-89d7-ee321c45a7fe" (UID: "859d895c-32e3-412e-89d7-ee321c45a7fe"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.804429 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "859d895c-32e3-412e-89d7-ee321c45a7fe" (UID: "859d895c-32e3-412e-89d7-ee321c45a7fe"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.806476 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.806608 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.806691 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.806807 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-src47\" (UniqueName: \"kubernetes.io/projected/859d895c-32e3-412e-89d7-ee321c45a7fe-kube-api-access-src47\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:19 crc kubenswrapper[4774]: I1121 15:38:19.806902 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/859d895c-32e3-412e-89d7-ee321c45a7fe-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.108153 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9ab3689-3218-47c5-a72c-7e187b48fc37" path="/var/lib/kubelet/pods/a9ab3689-3218-47c5-a72c-7e187b48fc37/volumes" Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.570358 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fce1c2fe-593d-489c-a4ee-79b2be128d8b","Type":"ContainerStarted","Data":"065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00"} Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.571110 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.573435 4774 generic.go:334] "Generic (PLEG): container finished" 
podID="996312d6-6bfd-47a2-83a4-d43364658f94" containerID="d57481469ee63be2532071a8c3856420f643f85b1c91fe469f6acf85573d98c5" exitCode=0 Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.573499 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"996312d6-6bfd-47a2-83a4-d43364658f94","Type":"ContainerDied","Data":"d57481469ee63be2532071a8c3856420f643f85b1c91fe469f6acf85573d98c5"} Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.573590 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9f988f55-pw27d" Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.590067 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.590045469 podStartE2EDuration="2.590045469s" podCreationTimestamp="2025-11-21 15:38:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:38:20.587848347 +0000 UTC m=+5691.240047606" watchObservedRunningTime="2025-11-21 15:38:20.590045469 +0000 UTC m=+5691.242244718" Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.607573 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c9f988f55-pw27d"] Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.617105 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c9f988f55-pw27d"] Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.834252 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.933765 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/996312d6-6bfd-47a2-83a4-d43364658f94-combined-ca-bundle\") pod \"996312d6-6bfd-47a2-83a4-d43364658f94\" (UID: \"996312d6-6bfd-47a2-83a4-d43364658f94\") " Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.934029 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/996312d6-6bfd-47a2-83a4-d43364658f94-config-data\") pod \"996312d6-6bfd-47a2-83a4-d43364658f94\" (UID: \"996312d6-6bfd-47a2-83a4-d43364658f94\") " Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.934085 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvmhf\" (UniqueName: \"kubernetes.io/projected/996312d6-6bfd-47a2-83a4-d43364658f94-kube-api-access-rvmhf\") pod \"996312d6-6bfd-47a2-83a4-d43364658f94\" (UID: \"996312d6-6bfd-47a2-83a4-d43364658f94\") " Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.939575 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/996312d6-6bfd-47a2-83a4-d43364658f94-kube-api-access-rvmhf" (OuterVolumeSpecName: "kube-api-access-rvmhf") pod "996312d6-6bfd-47a2-83a4-d43364658f94" (UID: "996312d6-6bfd-47a2-83a4-d43364658f94"). InnerVolumeSpecName "kube-api-access-rvmhf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.964009 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/996312d6-6bfd-47a2-83a4-d43364658f94-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "996312d6-6bfd-47a2-83a4-d43364658f94" (UID: "996312d6-6bfd-47a2-83a4-d43364658f94"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:20 crc kubenswrapper[4774]: I1121 15:38:20.985657 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/996312d6-6bfd-47a2-83a4-d43364658f94-config-data" (OuterVolumeSpecName: "config-data") pod "996312d6-6bfd-47a2-83a4-d43364658f94" (UID: "996312d6-6bfd-47a2-83a4-d43364658f94"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.015364 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.035795 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/996312d6-6bfd-47a2-83a4-d43364658f94-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.035844 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvmhf\" (UniqueName: \"kubernetes.io/projected/996312d6-6bfd-47a2-83a4-d43364658f94-kube-api-access-rvmhf\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.035855 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/996312d6-6bfd-47a2-83a4-d43364658f94-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.185159 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.584740 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.588948 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"996312d6-6bfd-47a2-83a4-d43364658f94","Type":"ContainerDied","Data":"9f224c21fe26268d0f680ab7a1b3da95ff749c4d9060ac9c47b92e32f48cbc7a"} Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.589058 4774 scope.go:117] "RemoveContainer" containerID="d57481469ee63be2532071a8c3856420f643f85b1c91fe469f6acf85573d98c5" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.628874 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.636675 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.654613 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 15:38:21 crc kubenswrapper[4774]: E1121 15:38:21.654978 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="859d895c-32e3-412e-89d7-ee321c45a7fe" containerName="init" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.654996 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="859d895c-32e3-412e-89d7-ee321c45a7fe" containerName="init" Nov 21 15:38:21 crc kubenswrapper[4774]: E1121 15:38:21.655011 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="859d895c-32e3-412e-89d7-ee321c45a7fe" containerName="dnsmasq-dns" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.655017 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="859d895c-32e3-412e-89d7-ee321c45a7fe" containerName="dnsmasq-dns" Nov 21 15:38:21 crc kubenswrapper[4774]: E1121 15:38:21.655032 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="996312d6-6bfd-47a2-83a4-d43364658f94" containerName="nova-cell1-conductor-conductor" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.655038 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="996312d6-6bfd-47a2-83a4-d43364658f94" containerName="nova-cell1-conductor-conductor" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.655219 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="996312d6-6bfd-47a2-83a4-d43364658f94" containerName="nova-cell1-conductor-conductor" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.655234 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="859d895c-32e3-412e-89d7-ee321c45a7fe" containerName="dnsmasq-dns" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.655809 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.666587 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.680774 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.750936 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.751111 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.751190 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qp7c\" (UniqueName: \"kubernetes.io/projected/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-kube-api-access-2qp7c\") pod \"nova-cell1-conductor-0\" (UID: \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.832266 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.832315 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.853350 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.853437 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qp7c\" (UniqueName: \"kubernetes.io/projected/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-kube-api-access-2qp7c\") pod \"nova-cell1-conductor-0\" (UID: \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.853509 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.860406 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.862523 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:21 crc kubenswrapper[4774]: I1121 15:38:21.876598 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qp7c\" (UniqueName: \"kubernetes.io/projected/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-kube-api-access-2qp7c\") pod \"nova-cell1-conductor-0\" (UID: \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\") " pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:22 crc kubenswrapper[4774]: I1121 15:38:22.002241 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:22 crc kubenswrapper[4774]: I1121 15:38:22.122653 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="859d895c-32e3-412e-89d7-ee321c45a7fe" path="/var/lib/kubelet/pods/859d895c-32e3-412e-89d7-ee321c45a7fe/volumes" Nov 21 15:38:22 crc kubenswrapper[4774]: I1121 15:38:22.123381 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="996312d6-6bfd-47a2-83a4-d43364658f94" path="/var/lib/kubelet/pods/996312d6-6bfd-47a2-83a4-d43364658f94/volumes" Nov 21 15:38:22 crc kubenswrapper[4774]: I1121 15:38:22.614525 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 15:38:23 crc kubenswrapper[4774]: I1121 15:38:23.617792 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784","Type":"ContainerStarted","Data":"1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10"} Nov 21 15:38:23 crc kubenswrapper[4774]: I1121 15:38:23.618126 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784","Type":"ContainerStarted","Data":"a42817418a58f63cf62a15b84fa4cd528b6c4a625d62147d44d8ee4e99a064cc"} Nov 21 15:38:23 crc kubenswrapper[4774]: I1121 15:38:23.619299 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:23 crc kubenswrapper[4774]: I1121 15:38:23.670801 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:38:23 crc kubenswrapper[4774]: I1121 15:38:23.682797 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:38:23 crc kubenswrapper[4774]: I1121 15:38:23.707584 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.707568029 podStartE2EDuration="2.707568029s" podCreationTimestamp="2025-11-21 15:38:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:38:23.649990987 +0000 UTC m=+5694.302190246" watchObservedRunningTime="2025-11-21 15:38:23.707568029 +0000 UTC m=+5694.359767288" Nov 21 15:38:24 crc kubenswrapper[4774]: I1121 15:38:24.646203 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Nov 21 15:38:26 crc kubenswrapper[4774]: I1121 15:38:26.014909 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 21 15:38:26 crc 
kubenswrapper[4774]: I1121 15:38:26.054692 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 21 15:38:26 crc kubenswrapper[4774]: I1121 15:38:26.700657 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 21 15:38:26 crc kubenswrapper[4774]: I1121 15:38:26.818534 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 15:38:26 crc kubenswrapper[4774]: I1121 15:38:26.818599 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 15:38:26 crc kubenswrapper[4774]: I1121 15:38:26.831858 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 21 15:38:26 crc kubenswrapper[4774]: I1121 15:38:26.831922 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 21 15:38:27 crc kubenswrapper[4774]: I1121 15:38:27.035474 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 21 15:38:27 crc kubenswrapper[4774]: I1121 15:38:27.903400 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.82:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 15:38:27 crc kubenswrapper[4774]: I1121 15:38:27.986029 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.83:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 15:38:27 crc kubenswrapper[4774]: I1121 15:38:27.986028 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.82:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 15:38:27 crc kubenswrapper[4774]: I1121 15:38:27.986462 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.83:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 15:38:28 crc kubenswrapper[4774]: I1121 15:38:28.813982 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 21 15:38:36 crc kubenswrapper[4774]: I1121 15:38:36.824214 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 21 15:38:36 crc kubenswrapper[4774]: I1121 15:38:36.826086 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 21 15:38:36 crc kubenswrapper[4774]: I1121 15:38:36.826443 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 21 15:38:36 crc kubenswrapper[4774]: I1121 15:38:36.826479 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 21 15:38:36 crc kubenswrapper[4774]: I1121 15:38:36.830746 4774 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 21 15:38:36 crc kubenswrapper[4774]: I1121 15:38:36.834650 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 21 15:38:36 crc kubenswrapper[4774]: I1121 15:38:36.836601 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 21 15:38:36 crc kubenswrapper[4774]: I1121 15:38:36.836752 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 21 15:38:36 crc kubenswrapper[4774]: I1121 15:38:36.837773 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 21 15:38:37 crc kubenswrapper[4774]: I1121 15:38:37.720393 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 15:38:37 crc kubenswrapper[4774]: I1121 15:38:37.722256 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 21 15:38:37 crc kubenswrapper[4774]: I1121 15:38:37.724301 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Nov 21 15:38:37 crc kubenswrapper[4774]: I1121 15:38:37.741918 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 15:38:37 crc kubenswrapper[4774]: I1121 15:38:37.785935 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 21 15:38:37 crc kubenswrapper[4774]: I1121 15:38:37.903720 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:37 crc kubenswrapper[4774]: I1121 15:38:37.903761 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bd9s\" (UniqueName: \"kubernetes.io/projected/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-kube-api-access-4bd9s\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:37 crc kubenswrapper[4774]: I1121 15:38:37.904152 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:37 crc kubenswrapper[4774]: I1121 15:38:37.904220 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-config-data\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:37 crc kubenswrapper[4774]: I1121 15:38:37.904370 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:37 crc 
kubenswrapper[4774]: I1121 15:38:37.904427 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-scripts\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.005611 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.005677 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-config-data\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.005747 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.005783 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-scripts\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.005855 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.005880 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bd9s\" (UniqueName: \"kubernetes.io/projected/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-kube-api-access-4bd9s\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.005932 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.011206 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-scripts\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.011499 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: 
\"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.011995 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-config-data\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.013196 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.025444 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bd9s\" (UniqueName: \"kubernetes.io/projected/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-kube-api-access-4bd9s\") pod \"cinder-scheduler-0\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.040463 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.505730 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 15:38:38 crc kubenswrapper[4774]: W1121 15:38:38.524139 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea14565c_e42a_4a89_868c_ccc65fa1b4b2.slice/crio-98a677aead4ce947ff0af671ab791cc2d061c88ce56b2eb86e71c0f321945997 WatchSource:0}: Error finding container 98a677aead4ce947ff0af671ab791cc2d061c88ce56b2eb86e71c0f321945997: Status 404 returned error can't find the container with id 98a677aead4ce947ff0af671ab791cc2d061c88ce56b2eb86e71c0f321945997 Nov 21 15:38:38 crc kubenswrapper[4774]: I1121 15:38:38.798935 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea14565c-e42a-4a89-868c-ccc65fa1b4b2","Type":"ContainerStarted","Data":"98a677aead4ce947ff0af671ab791cc2d061c88ce56b2eb86e71c0f321945997"} Nov 21 15:38:39 crc kubenswrapper[4774]: I1121 15:38:39.367223 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 21 15:38:39 crc kubenswrapper[4774]: I1121 15:38:39.367716 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1c6cb818-68c1-403e-8617-825998ad04ed" containerName="cinder-api-log" containerID="cri-o://7b9913c1dbedfeb35315bd8c4e930171af39bd6341aa4b26b42b9f943b7c6fcc" gracePeriod=30 Nov 21 15:38:39 crc kubenswrapper[4774]: I1121 15:38:39.367851 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="1c6cb818-68c1-403e-8617-825998ad04ed" containerName="cinder-api" containerID="cri-o://655b784ea30c404876264cfa245b16fc0b846353916e22e09699d26e0c3047af" gracePeriod=30 Nov 21 15:38:39 crc kubenswrapper[4774]: I1121 15:38:39.824134 4774 generic.go:334] "Generic (PLEG): container finished" podID="1c6cb818-68c1-403e-8617-825998ad04ed" containerID="7b9913c1dbedfeb35315bd8c4e930171af39bd6341aa4b26b42b9f943b7c6fcc" exitCode=143 Nov 21 15:38:39 crc kubenswrapper[4774]: I1121 15:38:39.824175 4774 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/cinder-api-0" event={"ID":"1c6cb818-68c1-403e-8617-825998ad04ed","Type":"ContainerDied","Data":"7b9913c1dbedfeb35315bd8c4e930171af39bd6341aa4b26b42b9f943b7c6fcc"} Nov 21 15:38:39 crc kubenswrapper[4774]: I1121 15:38:39.826920 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea14565c-e42a-4a89-868c-ccc65fa1b4b2","Type":"ContainerStarted","Data":"643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a"} Nov 21 15:38:39 crc kubenswrapper[4774]: I1121 15:38:39.826978 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea14565c-e42a-4a89-868c-ccc65fa1b4b2","Type":"ContainerStarted","Data":"4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01"} Nov 21 15:38:39 crc kubenswrapper[4774]: I1121 15:38:39.851774 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=2.85175075 podStartE2EDuration="2.85175075s" podCreationTimestamp="2025-11-21 15:38:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:38:39.843293029 +0000 UTC m=+5710.495492298" watchObservedRunningTime="2025-11-21 15:38:39.85175075 +0000 UTC m=+5710.503950009" Nov 21 15:38:39 crc kubenswrapper[4774]: I1121 15:38:39.971496 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 21 15:38:39 crc kubenswrapper[4774]: I1121 15:38:39.974252 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:39 crc kubenswrapper[4774]: I1121 15:38:39.981767 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-volume-volume1-config-data" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.011501 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.071272 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.071346 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-sys\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.071429 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.071461 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " 
pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.071485 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.071505 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77096ba7-ee2f-41f7-9457-a85714d0881c-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.071536 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-run\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.071556 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/77096ba7-ee2f-41f7-9457-a85714d0881c-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.071761 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.071893 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77096ba7-ee2f-41f7-9457-a85714d0881c-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.072041 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-dev\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.072077 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.072113 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/77096ba7-ee2f-41f7-9457-a85714d0881c-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " 
pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.072166 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rkmbl\" (UniqueName: \"kubernetes.io/projected/77096ba7-ee2f-41f7-9457-a85714d0881c-kube-api-access-rkmbl\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.072264 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77096ba7-ee2f-41f7-9457-a85714d0881c-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.072404 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.174728 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.174841 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77096ba7-ee2f-41f7-9457-a85714d0881c-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.174910 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-dev\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.174950 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.174976 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/77096ba7-ee2f-41f7-9457-a85714d0881c-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175009 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rkmbl\" (UniqueName: \"kubernetes.io/projected/77096ba7-ee2f-41f7-9457-a85714d0881c-kube-api-access-rkmbl\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 
15:38:40.175046 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77096ba7-ee2f-41f7-9457-a85714d0881c-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175130 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175135 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-var-locks-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175177 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175207 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-sys\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175256 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175283 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175318 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175342 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77096ba7-ee2f-41f7-9457-a85714d0881c-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175372 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-run\") pod 
\"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175396 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/77096ba7-ee2f-41f7-9457-a85714d0881c-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175743 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-dev\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.175947 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-var-locks-brick\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.176001 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-sys\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.176118 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-lib-modules\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.176141 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-var-lib-cinder\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.176136 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-etc-iscsi\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.176204 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-etc-machine-id\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.176246 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-etc-nvme\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.176310 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: 
\"kubernetes.io/host-path/77096ba7-ee2f-41f7-9457-a85714d0881c-run\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.180646 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77096ba7-ee2f-41f7-9457-a85714d0881c-combined-ca-bundle\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.180553 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/77096ba7-ee2f-41f7-9457-a85714d0881c-config-data-custom\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.189649 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77096ba7-ee2f-41f7-9457-a85714d0881c-config-data\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.190056 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/77096ba7-ee2f-41f7-9457-a85714d0881c-ceph\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.190137 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77096ba7-ee2f-41f7-9457-a85714d0881c-scripts\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.193970 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rkmbl\" (UniqueName: \"kubernetes.io/projected/77096ba7-ee2f-41f7-9457-a85714d0881c-kube-api-access-rkmbl\") pod \"cinder-volume-volume1-0\" (UID: \"77096ba7-ee2f-41f7-9457-a85714d0881c\") " pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.294452 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.679964 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-backup-0"] Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.681905 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.684466 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-backup-config-data" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.693649 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.788185 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-dev\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.788260 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546269f7-ad07-47a4-9a7b-7d98236673c2-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.788296 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-sys\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.788316 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.788438 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-lib-modules\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.788503 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546269f7-ad07-47a4-9a7b-7d98236673c2-scripts\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.788755 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-etc-nvme\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.788834 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546269f7-ad07-47a4-9a7b-7d98236673c2-config-data\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.789014 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-iscsi\" (UniqueName: 
\"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.789076 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-run\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.789138 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/546269f7-ad07-47a4-9a7b-7d98236673c2-config-data-custom\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.789225 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2tth\" (UniqueName: \"kubernetes.io/projected/546269f7-ad07-47a4-9a7b-7d98236673c2-kube-api-access-b2tth\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.789254 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.789287 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.789315 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/546269f7-ad07-47a4-9a7b-7d98236673c2-ceph\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.789432 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.847212 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-volume-volume1-0"] Nov 21 15:38:40 crc kubenswrapper[4774]: W1121 15:38:40.853122 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77096ba7_ee2f_41f7_9457_a85714d0881c.slice/crio-06dc1e609b12768b4fac7bc545df7457597b7db8757fc94fe41a1390825b67ac WatchSource:0}: Error finding container 06dc1e609b12768b4fac7bc545df7457597b7db8757fc94fe41a1390825b67ac: Status 404 returned error can't find the container with id 06dc1e609b12768b4fac7bc545df7457597b7db8757fc94fe41a1390825b67ac Nov 21 15:38:40 crc 
kubenswrapper[4774]: I1121 15:38:40.855752 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.890960 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.891034 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-run\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.891099 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/546269f7-ad07-47a4-9a7b-7d98236673c2-config-data-custom\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.891134 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2tth\" (UniqueName: \"kubernetes.io/projected/546269f7-ad07-47a4-9a7b-7d98236673c2-kube-api-access-b2tth\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.891162 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-run\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.891130 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-iscsi\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-etc-iscsi\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.891169 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.891247 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-brick\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-var-locks-brick\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.891371 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-cinder\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.891313 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-cinder\" (UniqueName: 
\"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-var-lib-cinder\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.891453 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/546269f7-ad07-47a4-9a7b-7d98236673c2-ceph\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.891983 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892012 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dev\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-dev\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892077 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546269f7-ad07-47a4-9a7b-7d98236673c2-combined-ca-bundle\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892111 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-sys\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892111 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-etc-machine-id\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892130 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892171 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-locks-cinder\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-var-locks-cinder\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892191 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-lib-modules\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892202 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dev\" (UniqueName: 
\"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-dev\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892234 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546269f7-ad07-47a4-9a7b-7d98236673c2-scripts\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892402 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-etc-nvme\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892429 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546269f7-ad07-47a4-9a7b-7d98236673c2-config-data\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892467 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sys\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-sys\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.892749 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-nvme\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-etc-nvme\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.893549 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lib-modules\" (UniqueName: \"kubernetes.io/host-path/546269f7-ad07-47a4-9a7b-7d98236673c2-lib-modules\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.896618 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/546269f7-ad07-47a4-9a7b-7d98236673c2-config-data-custom\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.897035 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/546269f7-ad07-47a4-9a7b-7d98236673c2-ceph\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.900286 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/546269f7-ad07-47a4-9a7b-7d98236673c2-scripts\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.900740 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/546269f7-ad07-47a4-9a7b-7d98236673c2-combined-ca-bundle\") pod 
\"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.900861 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/546269f7-ad07-47a4-9a7b-7d98236673c2-config-data\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:40 crc kubenswrapper[4774]: I1121 15:38:40.910480 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2tth\" (UniqueName: \"kubernetes.io/projected/546269f7-ad07-47a4-9a7b-7d98236673c2-kube-api-access-b2tth\") pod \"cinder-backup-0\" (UID: \"546269f7-ad07-47a4-9a7b-7d98236673c2\") " pod="openstack/cinder-backup-0" Nov 21 15:38:41 crc kubenswrapper[4774]: I1121 15:38:41.010599 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-backup-0" Nov 21 15:38:41 crc kubenswrapper[4774]: I1121 15:38:41.570700 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-backup-0"] Nov 21 15:38:41 crc kubenswrapper[4774]: W1121 15:38:41.576087 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod546269f7_ad07_47a4_9a7b_7d98236673c2.slice/crio-df2d4e780652d51031255aa63adea26cf259cb8f1c7b9e59afb5de82f3ab5f0b WatchSource:0}: Error finding container df2d4e780652d51031255aa63adea26cf259cb8f1c7b9e59afb5de82f3ab5f0b: Status 404 returned error can't find the container with id df2d4e780652d51031255aa63adea26cf259cb8f1c7b9e59afb5de82f3ab5f0b Nov 21 15:38:41 crc kubenswrapper[4774]: I1121 15:38:41.847497 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"546269f7-ad07-47a4-9a7b-7d98236673c2","Type":"ContainerStarted","Data":"df2d4e780652d51031255aa63adea26cf259cb8f1c7b9e59afb5de82f3ab5f0b"} Nov 21 15:38:41 crc kubenswrapper[4774]: I1121 15:38:41.849146 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"77096ba7-ee2f-41f7-9457-a85714d0881c","Type":"ContainerStarted","Data":"06dc1e609b12768b4fac7bc545df7457597b7db8757fc94fe41a1390825b67ac"} Nov 21 15:38:42 crc kubenswrapper[4774]: I1121 15:38:42.867585 4774 generic.go:334] "Generic (PLEG): container finished" podID="1c6cb818-68c1-403e-8617-825998ad04ed" containerID="655b784ea30c404876264cfa245b16fc0b846353916e22e09699d26e0c3047af" exitCode=0 Nov 21 15:38:42 crc kubenswrapper[4774]: I1121 15:38:42.867659 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1c6cb818-68c1-403e-8617-825998ad04ed","Type":"ContainerDied","Data":"655b784ea30c404876264cfa245b16fc0b846353916e22e09699d26e0c3047af"} Nov 21 15:38:42 crc kubenswrapper[4774]: I1121 15:38:42.879184 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"77096ba7-ee2f-41f7-9457-a85714d0881c","Type":"ContainerStarted","Data":"4fdd28d9c7ab43cf2eb6ffeee8e85a27eca45f07c63177467bbe26f377f76b8e"} Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.041444 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.334569 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.355489 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-config-data\") pod \"1c6cb818-68c1-403e-8617-825998ad04ed\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.355651 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-combined-ca-bundle\") pod \"1c6cb818-68c1-403e-8617-825998ad04ed\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.356489 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c6cb818-68c1-403e-8617-825998ad04ed-logs\") pod \"1c6cb818-68c1-403e-8617-825998ad04ed\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.356512 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1c6cb818-68c1-403e-8617-825998ad04ed-etc-machine-id\") pod \"1c6cb818-68c1-403e-8617-825998ad04ed\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.356703 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-scripts\") pod \"1c6cb818-68c1-403e-8617-825998ad04ed\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.356723 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jr7ws\" (UniqueName: \"kubernetes.io/projected/1c6cb818-68c1-403e-8617-825998ad04ed-kube-api-access-jr7ws\") pod \"1c6cb818-68c1-403e-8617-825998ad04ed\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.357047 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-config-data-custom\") pod \"1c6cb818-68c1-403e-8617-825998ad04ed\" (UID: \"1c6cb818-68c1-403e-8617-825998ad04ed\") " Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.358066 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1c6cb818-68c1-403e-8617-825998ad04ed-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1c6cb818-68c1-403e-8617-825998ad04ed" (UID: "1c6cb818-68c1-403e-8617-825998ad04ed"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.359802 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c6cb818-68c1-403e-8617-825998ad04ed-logs" (OuterVolumeSpecName: "logs") pod "1c6cb818-68c1-403e-8617-825998ad04ed" (UID: "1c6cb818-68c1-403e-8617-825998ad04ed"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.359928 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c6cb818-68c1-403e-8617-825998ad04ed-logs\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.359949 4774 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1c6cb818-68c1-403e-8617-825998ad04ed-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.366645 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1c6cb818-68c1-403e-8617-825998ad04ed" (UID: "1c6cb818-68c1-403e-8617-825998ad04ed"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.374735 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-scripts" (OuterVolumeSpecName: "scripts") pod "1c6cb818-68c1-403e-8617-825998ad04ed" (UID: "1c6cb818-68c1-403e-8617-825998ad04ed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.401503 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c6cb818-68c1-403e-8617-825998ad04ed-kube-api-access-jr7ws" (OuterVolumeSpecName: "kube-api-access-jr7ws") pod "1c6cb818-68c1-403e-8617-825998ad04ed" (UID: "1c6cb818-68c1-403e-8617-825998ad04ed"). InnerVolumeSpecName "kube-api-access-jr7ws". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.426967 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c6cb818-68c1-403e-8617-825998ad04ed" (UID: "1c6cb818-68c1-403e-8617-825998ad04ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.442998 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-config-data" (OuterVolumeSpecName: "config-data") pod "1c6cb818-68c1-403e-8617-825998ad04ed" (UID: "1c6cb818-68c1-403e-8617-825998ad04ed"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.461995 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.462041 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jr7ws\" (UniqueName: \"kubernetes.io/projected/1c6cb818-68c1-403e-8617-825998ad04ed-kube-api-access-jr7ws\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.462058 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.462072 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.462088 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c6cb818-68c1-403e-8617-825998ad04ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.892808 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"1c6cb818-68c1-403e-8617-825998ad04ed","Type":"ContainerDied","Data":"1513aa1917764b85a2189802d21b7db1e6f42635a7c1482665ca8c120da6520f"} Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.893158 4774 scope.go:117] "RemoveContainer" containerID="655b784ea30c404876264cfa245b16fc0b846353916e22e09699d26e0c3047af" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.893317 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.897607 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"546269f7-ad07-47a4-9a7b-7d98236673c2","Type":"ContainerStarted","Data":"884f1b6998ec7582e7d27cfe8a989b018ecf4a00d6ef545a06eb92c3c374dd9a"} Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.910014 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-volume-volume1-0" event={"ID":"77096ba7-ee2f-41f7-9457-a85714d0881c","Type":"ContainerStarted","Data":"04f8a88efe434ace7bf25c197198bd12d46e527e1a611462f40687c3044adbd4"} Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.929724 4774 scope.go:117] "RemoveContainer" containerID="7b9913c1dbedfeb35315bd8c4e930171af39bd6341aa4b26b42b9f943b7c6fcc" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.937339 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-volume-volume1-0" podStartSLOduration=3.639099746 podStartE2EDuration="4.937322174s" podCreationTimestamp="2025-11-21 15:38:39 +0000 UTC" firstStartedPulling="2025-11-21 15:38:40.855470472 +0000 UTC m=+5711.507669731" lastFinishedPulling="2025-11-21 15:38:42.1536929 +0000 UTC m=+5712.805892159" observedRunningTime="2025-11-21 15:38:43.934560746 +0000 UTC m=+5714.586760005" watchObservedRunningTime="2025-11-21 15:38:43.937322174 +0000 UTC m=+5714.589521433" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.955770 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.961853 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.980309 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Nov 21 15:38:43 crc kubenswrapper[4774]: E1121 15:38:43.980908 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c6cb818-68c1-403e-8617-825998ad04ed" containerName="cinder-api" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.980934 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c6cb818-68c1-403e-8617-825998ad04ed" containerName="cinder-api" Nov 21 15:38:43 crc kubenswrapper[4774]: E1121 15:38:43.980956 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c6cb818-68c1-403e-8617-825998ad04ed" containerName="cinder-api-log" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.980963 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c6cb818-68c1-403e-8617-825998ad04ed" containerName="cinder-api-log" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.981169 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c6cb818-68c1-403e-8617-825998ad04ed" containerName="cinder-api" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.981186 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c6cb818-68c1-403e-8617-825998ad04ed" containerName="cinder-api-log" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.983725 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.993357 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Nov 21 15:38:43 crc kubenswrapper[4774]: I1121 15:38:43.998036 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.074054 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/582f360a-4d3f-4177-989c-b4c05a1013df-config-data-custom\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.074136 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/582f360a-4d3f-4177-989c-b4c05a1013df-config-data\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.074192 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/582f360a-4d3f-4177-989c-b4c05a1013df-scripts\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.074219 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/582f360a-4d3f-4177-989c-b4c05a1013df-logs\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.074249 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/582f360a-4d3f-4177-989c-b4c05a1013df-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.074328 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/582f360a-4d3f-4177-989c-b4c05a1013df-etc-machine-id\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.074533 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcmvj\" (UniqueName: \"kubernetes.io/projected/582f360a-4d3f-4177-989c-b4c05a1013df-kube-api-access-kcmvj\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.124361 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c6cb818-68c1-403e-8617-825998ad04ed" path="/var/lib/kubelet/pods/1c6cb818-68c1-403e-8617-825998ad04ed/volumes" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.179124 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/582f360a-4d3f-4177-989c-b4c05a1013df-etc-machine-id\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") 
" pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.179346 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcmvj\" (UniqueName: \"kubernetes.io/projected/582f360a-4d3f-4177-989c-b4c05a1013df-kube-api-access-kcmvj\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.179452 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/582f360a-4d3f-4177-989c-b4c05a1013df-config-data-custom\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.179356 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/582f360a-4d3f-4177-989c-b4c05a1013df-etc-machine-id\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.179507 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/582f360a-4d3f-4177-989c-b4c05a1013df-config-data\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.179558 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/582f360a-4d3f-4177-989c-b4c05a1013df-scripts\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.179586 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/582f360a-4d3f-4177-989c-b4c05a1013df-logs\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.179613 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/582f360a-4d3f-4177-989c-b4c05a1013df-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.180179 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/582f360a-4d3f-4177-989c-b4c05a1013df-logs\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.184470 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/582f360a-4d3f-4177-989c-b4c05a1013df-config-data-custom\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.184481 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/582f360a-4d3f-4177-989c-b4c05a1013df-config-data\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc 
kubenswrapper[4774]: I1121 15:38:44.184779 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/582f360a-4d3f-4177-989c-b4c05a1013df-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.184898 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/582f360a-4d3f-4177-989c-b4c05a1013df-scripts\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.195981 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcmvj\" (UniqueName: \"kubernetes.io/projected/582f360a-4d3f-4177-989c-b4c05a1013df-kube-api-access-kcmvj\") pod \"cinder-api-0\" (UID: \"582f360a-4d3f-4177-989c-b4c05a1013df\") " pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.312051 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.768969 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.922432 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-backup-0" event={"ID":"546269f7-ad07-47a4-9a7b-7d98236673c2","Type":"ContainerStarted","Data":"b797ad7a8cc4e2abeebad92ee4be9902d4f95ee7c1db2aa068b5443d24283022"} Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.927420 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"582f360a-4d3f-4177-989c-b4c05a1013df","Type":"ContainerStarted","Data":"e14857c7ca701c796c8986117cc1289d9f3bbfa0bc69be81b2dbadf2673d30ed"} Nov 21 15:38:44 crc kubenswrapper[4774]: I1121 15:38:44.954333 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-backup-0" podStartSLOduration=3.281050567 podStartE2EDuration="4.954287935s" podCreationTimestamp="2025-11-21 15:38:40 +0000 UTC" firstStartedPulling="2025-11-21 15:38:41.577725191 +0000 UTC m=+5712.229924460" lastFinishedPulling="2025-11-21 15:38:43.250962559 +0000 UTC m=+5713.903161828" observedRunningTime="2025-11-21 15:38:44.945115223 +0000 UTC m=+5715.597314472" watchObservedRunningTime="2025-11-21 15:38:44.954287935 +0000 UTC m=+5715.606487194" Nov 21 15:38:45 crc kubenswrapper[4774]: I1121 15:38:45.294866 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:45 crc kubenswrapper[4774]: I1121 15:38:45.940102 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"582f360a-4d3f-4177-989c-b4c05a1013df","Type":"ContainerStarted","Data":"259b7b9941aee7e015bfccae1d0d6975fa8d2572b62a0f3b3695d57e10062c10"} Nov 21 15:38:46 crc kubenswrapper[4774]: I1121 15:38:46.011790 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-backup-0" Nov 21 15:38:46 crc kubenswrapper[4774]: I1121 15:38:46.950982 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"582f360a-4d3f-4177-989c-b4c05a1013df","Type":"ContainerStarted","Data":"a1559762d4d87d8fd4c6a9ac710e861b1fc213464fe20168f7d75179e9fa6523"} Nov 21 15:38:46 crc kubenswrapper[4774]: I1121 
15:38:46.951110 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Nov 21 15:38:46 crc kubenswrapper[4774]: I1121 15:38:46.971937 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.971913779 podStartE2EDuration="3.971913779s" podCreationTimestamp="2025-11-21 15:38:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:38:46.966781613 +0000 UTC m=+5717.618980882" watchObservedRunningTime="2025-11-21 15:38:46.971913779 +0000 UTC m=+5717.624113038" Nov 21 15:38:48 crc kubenswrapper[4774]: I1121 15:38:48.248061 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 21 15:38:48 crc kubenswrapper[4774]: I1121 15:38:48.284365 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 15:38:48 crc kubenswrapper[4774]: I1121 15:38:48.966542 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ea14565c-e42a-4a89-868c-ccc65fa1b4b2" containerName="cinder-scheduler" containerID="cri-o://4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01" gracePeriod=30 Nov 21 15:38:48 crc kubenswrapper[4774]: I1121 15:38:48.966641 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ea14565c-e42a-4a89-868c-ccc65fa1b4b2" containerName="probe" containerID="cri-o://643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a" gracePeriod=30 Nov 21 15:38:50 crc kubenswrapper[4774]: I1121 15:38:50.494931 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-volume-volume1-0" Nov 21 15:38:50 crc kubenswrapper[4774]: I1121 15:38:50.997207 4774 generic.go:334] "Generic (PLEG): container finished" podID="ea14565c-e42a-4a89-868c-ccc65fa1b4b2" containerID="643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a" exitCode=0 Nov 21 15:38:50 crc kubenswrapper[4774]: I1121 15:38:50.997278 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea14565c-e42a-4a89-868c-ccc65fa1b4b2","Type":"ContainerDied","Data":"643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a"} Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.205259 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-backup-0" Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.795455 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.832811 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-scripts\") pod \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.832890 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-config-data-custom\") pod \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.832914 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-etc-machine-id\") pod \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.832979 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-combined-ca-bundle\") pod \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.833144 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-config-data\") pod \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.833187 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bd9s\" (UniqueName: \"kubernetes.io/projected/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-kube-api-access-4bd9s\") pod \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\" (UID: \"ea14565c-e42a-4a89-868c-ccc65fa1b4b2\") " Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.833789 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ea14565c-e42a-4a89-868c-ccc65fa1b4b2" (UID: "ea14565c-e42a-4a89-868c-ccc65fa1b4b2"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.838623 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ea14565c-e42a-4a89-868c-ccc65fa1b4b2" (UID: "ea14565c-e42a-4a89-868c-ccc65fa1b4b2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.840723 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-kube-api-access-4bd9s" (OuterVolumeSpecName: "kube-api-access-4bd9s") pod "ea14565c-e42a-4a89-868c-ccc65fa1b4b2" (UID: "ea14565c-e42a-4a89-868c-ccc65fa1b4b2"). InnerVolumeSpecName "kube-api-access-4bd9s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.856118 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-scripts" (OuterVolumeSpecName: "scripts") pod "ea14565c-e42a-4a89-868c-ccc65fa1b4b2" (UID: "ea14565c-e42a-4a89-868c-ccc65fa1b4b2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.909439 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea14565c-e42a-4a89-868c-ccc65fa1b4b2" (UID: "ea14565c-e42a-4a89-868c-ccc65fa1b4b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.935732 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.935774 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-config-data-custom\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.935788 4774 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.935798 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.935809 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bd9s\" (UniqueName: \"kubernetes.io/projected/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-kube-api-access-4bd9s\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:51 crc kubenswrapper[4774]: I1121 15:38:51.950843 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-config-data" (OuterVolumeSpecName: "config-data") pod "ea14565c-e42a-4a89-868c-ccc65fa1b4b2" (UID: "ea14565c-e42a-4a89-868c-ccc65fa1b4b2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.007994 4774 generic.go:334] "Generic (PLEG): container finished" podID="ea14565c-e42a-4a89-868c-ccc65fa1b4b2" containerID="4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01" exitCode=0 Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.008044 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea14565c-e42a-4a89-868c-ccc65fa1b4b2","Type":"ContainerDied","Data":"4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01"} Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.008076 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ea14565c-e42a-4a89-868c-ccc65fa1b4b2","Type":"ContainerDied","Data":"98a677aead4ce947ff0af671ab791cc2d061c88ce56b2eb86e71c0f321945997"} Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.008096 4774 scope.go:117] "RemoveContainer" containerID="643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a" Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.008237 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.038773 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea14565c-e42a-4a89-868c-ccc65fa1b4b2-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.052913 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.062916 4774 scope.go:117] "RemoveContainer" containerID="4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01" Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.072951 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.088749 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 15:38:52 crc kubenswrapper[4774]: E1121 15:38:52.089244 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea14565c-e42a-4a89-868c-ccc65fa1b4b2" containerName="probe" Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.089264 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea14565c-e42a-4a89-868c-ccc65fa1b4b2" containerName="probe" Nov 21 15:38:52 crc kubenswrapper[4774]: E1121 15:38:52.089279 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea14565c-e42a-4a89-868c-ccc65fa1b4b2" containerName="cinder-scheduler" Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.089286 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea14565c-e42a-4a89-868c-ccc65fa1b4b2" containerName="cinder-scheduler" Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.089494 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea14565c-e42a-4a89-868c-ccc65fa1b4b2" containerName="cinder-scheduler" Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.089519 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea14565c-e42a-4a89-868c-ccc65fa1b4b2" containerName="probe" Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.089878 4774 scope.go:117] "RemoveContainer" containerID="643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a" Nov 21 15:38:52 crc 
Nov 21 15:38:52 crc kubenswrapper[4774]: E1121 15:38:52.092525 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a\": container with ID starting with 643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a not found: ID does not exist" containerID="643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.092582 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a"} err="failed to get container status \"643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a\": rpc error: code = NotFound desc = could not find container \"643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a\": container with ID starting with 643c36f121f76b804f59bd4a79176a368f8e9860f2a64a8c3a97249bc42d7c5a not found: ID does not exist"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.092609 4774 scope.go:117] "RemoveContainer" containerID="4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01"
Nov 21 15:38:52 crc kubenswrapper[4774]: E1121 15:38:52.094060 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01\": container with ID starting with 4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01 not found: ID does not exist" containerID="4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.094124 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01"} err="failed to get container status \"4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01\": rpc error: code = NotFound desc = could not find container \"4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01\": container with ID starting with 4523cdf85a76c5e6af9343674809c258d5841763bd0ac4a5fb2df9528a58bb01 not found: ID does not exist"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.094274 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.127418 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea14565c-e42a-4a89-868c-ccc65fa1b4b2" path="/var/lib/kubelet/pods/ea14565c-e42a-4a89-868c-ccc65fa1b4b2/volumes"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.128918 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.242119 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c96ad48-aa6a-4f51-a95c-12971f46255f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.242577 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c96ad48-aa6a-4f51-a95c-12971f46255f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.242726 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c96ad48-aa6a-4f51-a95c-12971f46255f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.242877 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4bmk\" (UniqueName: \"kubernetes.io/projected/2c96ad48-aa6a-4f51-a95c-12971f46255f-kube-api-access-v4bmk\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.243007 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c96ad48-aa6a-4f51-a95c-12971f46255f-scripts\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.243135 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c96ad48-aa6a-4f51-a95c-12971f46255f-config-data\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.344123 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c96ad48-aa6a-4f51-a95c-12971f46255f-scripts\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.344167 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c96ad48-aa6a-4f51-a95c-12971f46255f-config-data\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.344220 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c96ad48-aa6a-4f51-a95c-12971f46255f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.344305 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c96ad48-aa6a-4f51-a95c-12971f46255f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.344337 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c96ad48-aa6a-4f51-a95c-12971f46255f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.344355 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4bmk\" (UniqueName: \"kubernetes.io/projected/2c96ad48-aa6a-4f51-a95c-12971f46255f-kube-api-access-v4bmk\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.344632 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2c96ad48-aa6a-4f51-a95c-12971f46255f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.348594 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c96ad48-aa6a-4f51-a95c-12971f46255f-scripts\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.349005 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c96ad48-aa6a-4f51-a95c-12971f46255f-config-data\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.349496 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c96ad48-aa6a-4f51-a95c-12971f46255f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.350069 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2c96ad48-aa6a-4f51-a95c-12971f46255f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.361461 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4bmk\" (UniqueName: \"kubernetes.io/projected/2c96ad48-aa6a-4f51-a95c-12971f46255f-kube-api-access-v4bmk\") pod \"cinder-scheduler-0\" (UID: \"2c96ad48-aa6a-4f51-a95c-12971f46255f\") " pod="openstack/cinder-scheduler-0"
Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.410805 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Need to start a new one" pod="openstack/cinder-scheduler-0" Nov 21 15:38:52 crc kubenswrapper[4774]: I1121 15:38:52.905558 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Nov 21 15:38:53 crc kubenswrapper[4774]: I1121 15:38:53.018606 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2c96ad48-aa6a-4f51-a95c-12971f46255f","Type":"ContainerStarted","Data":"50d869b8ac430b9b3cdda154fc85967df3c3f703fe0b8d47231221d14e84e2e6"} Nov 21 15:38:54 crc kubenswrapper[4774]: I1121 15:38:54.033114 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2c96ad48-aa6a-4f51-a95c-12971f46255f","Type":"ContainerStarted","Data":"199bc7d0025c319186d2899b09979d9c1274cd0b219c52af40b89f8c63816421"} Nov 21 15:38:55 crc kubenswrapper[4774]: I1121 15:38:55.045337 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"2c96ad48-aa6a-4f51-a95c-12971f46255f","Type":"ContainerStarted","Data":"ce7db398e12f6261aa66ac8a0bdaecfd557df6e5c7c9f54cf0e0ba867b299ade"} Nov 21 15:38:55 crc kubenswrapper[4774]: I1121 15:38:55.089349 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.089309067 podStartE2EDuration="3.089309067s" podCreationTimestamp="2025-11-21 15:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:38:55.075526584 +0000 UTC m=+5725.727725853" watchObservedRunningTime="2025-11-21 15:38:55.089309067 +0000 UTC m=+5725.741508326" Nov 21 15:38:56 crc kubenswrapper[4774]: I1121 15:38:56.145611 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Nov 21 15:38:57 crc kubenswrapper[4774]: I1121 15:38:57.411449 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Nov 21 15:39:02 crc kubenswrapper[4774]: I1121 15:39:02.617631 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Nov 21 15:39:17 crc kubenswrapper[4774]: I1121 15:39:17.677201 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5d8lb"] Nov 21 15:39:17 crc kubenswrapper[4774]: I1121 15:39:17.682906 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:17 crc kubenswrapper[4774]: I1121 15:39:17.688976 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5d8lb"] Nov 21 15:39:17 crc kubenswrapper[4774]: I1121 15:39:17.787502 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-catalog-content\") pod \"certified-operators-5d8lb\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:17 crc kubenswrapper[4774]: I1121 15:39:17.787598 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rm2f\" (UniqueName: \"kubernetes.io/projected/532d29c6-e5da-402a-a382-81998f785537-kube-api-access-8rm2f\") pod \"certified-operators-5d8lb\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:17 crc kubenswrapper[4774]: I1121 15:39:17.787842 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-utilities\") pod \"certified-operators-5d8lb\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:17 crc kubenswrapper[4774]: I1121 15:39:17.889558 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-catalog-content\") pod \"certified-operators-5d8lb\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:17 crc kubenswrapper[4774]: I1121 15:39:17.889650 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rm2f\" (UniqueName: \"kubernetes.io/projected/532d29c6-e5da-402a-a382-81998f785537-kube-api-access-8rm2f\") pod \"certified-operators-5d8lb\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:17 crc kubenswrapper[4774]: I1121 15:39:17.889730 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-utilities\") pod \"certified-operators-5d8lb\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:17 crc kubenswrapper[4774]: I1121 15:39:17.890149 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-catalog-content\") pod \"certified-operators-5d8lb\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:17 crc kubenswrapper[4774]: I1121 15:39:17.890287 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-utilities\") pod \"certified-operators-5d8lb\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:17 crc kubenswrapper[4774]: I1121 15:39:17.914801 4774 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8rm2f\" (UniqueName: \"kubernetes.io/projected/532d29c6-e5da-402a-a382-81998f785537-kube-api-access-8rm2f\") pod \"certified-operators-5d8lb\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:18 crc kubenswrapper[4774]: I1121 15:39:18.019288 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:18 crc kubenswrapper[4774]: I1121 15:39:18.552560 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5d8lb"] Nov 21 15:39:19 crc kubenswrapper[4774]: I1121 15:39:19.279547 4774 generic.go:334] "Generic (PLEG): container finished" podID="532d29c6-e5da-402a-a382-81998f785537" containerID="82d3308a80ac26f00b43e144d4173c4403527c4f23535b5b3a61dfd217b7e32a" exitCode=0 Nov 21 15:39:19 crc kubenswrapper[4774]: I1121 15:39:19.279591 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5d8lb" event={"ID":"532d29c6-e5da-402a-a382-81998f785537","Type":"ContainerDied","Data":"82d3308a80ac26f00b43e144d4173c4403527c4f23535b5b3a61dfd217b7e32a"} Nov 21 15:39:19 crc kubenswrapper[4774]: I1121 15:39:19.279852 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5d8lb" event={"ID":"532d29c6-e5da-402a-a382-81998f785537","Type":"ContainerStarted","Data":"43e9752b36fe6f955462c14ef58a91ea1e01d8dd8ebc55fc72b803c0cd64bb0d"} Nov 21 15:39:21 crc kubenswrapper[4774]: I1121 15:39:21.301452 4774 generic.go:334] "Generic (PLEG): container finished" podID="532d29c6-e5da-402a-a382-81998f785537" containerID="8a313baba45e546df992b61a9f7fc5915681884ad99ef7cc01e02d32772d67d3" exitCode=0 Nov 21 15:39:21 crc kubenswrapper[4774]: I1121 15:39:21.301770 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5d8lb" event={"ID":"532d29c6-e5da-402a-a382-81998f785537","Type":"ContainerDied","Data":"8a313baba45e546df992b61a9f7fc5915681884ad99ef7cc01e02d32772d67d3"} Nov 21 15:39:22 crc kubenswrapper[4774]: I1121 15:39:22.312383 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5d8lb" event={"ID":"532d29c6-e5da-402a-a382-81998f785537","Type":"ContainerStarted","Data":"b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803"} Nov 21 15:39:22 crc kubenswrapper[4774]: I1121 15:39:22.333803 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5d8lb" podStartSLOduration=2.878248988 podStartE2EDuration="5.333783727s" podCreationTimestamp="2025-11-21 15:39:17 +0000 UTC" firstStartedPulling="2025-11-21 15:39:19.282152736 +0000 UTC m=+5749.934351995" lastFinishedPulling="2025-11-21 15:39:21.737687475 +0000 UTC m=+5752.389886734" observedRunningTime="2025-11-21 15:39:22.329773573 +0000 UTC m=+5752.981972862" watchObservedRunningTime="2025-11-21 15:39:22.333783727 +0000 UTC m=+5752.985982986" Nov 21 15:39:28 crc kubenswrapper[4774]: I1121 15:39:28.019474 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:28 crc kubenswrapper[4774]: I1121 15:39:28.020101 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:28 crc kubenswrapper[4774]: I1121 15:39:28.070516 4774 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:28 crc kubenswrapper[4774]: I1121 15:39:28.416058 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:28 crc kubenswrapper[4774]: I1121 15:39:28.459261 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5d8lb"] Nov 21 15:39:30 crc kubenswrapper[4774]: I1121 15:39:30.396398 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5d8lb" podUID="532d29c6-e5da-402a-a382-81998f785537" containerName="registry-server" containerID="cri-o://b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803" gracePeriod=2 Nov 21 15:39:30 crc kubenswrapper[4774]: I1121 15:39:30.847408 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:30 crc kubenswrapper[4774]: I1121 15:39:30.978525 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-utilities\") pod \"532d29c6-e5da-402a-a382-81998f785537\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " Nov 21 15:39:30 crc kubenswrapper[4774]: I1121 15:39:30.978571 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rm2f\" (UniqueName: \"kubernetes.io/projected/532d29c6-e5da-402a-a382-81998f785537-kube-api-access-8rm2f\") pod \"532d29c6-e5da-402a-a382-81998f785537\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " Nov 21 15:39:30 crc kubenswrapper[4774]: I1121 15:39:30.978696 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-catalog-content\") pod \"532d29c6-e5da-402a-a382-81998f785537\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " Nov 21 15:39:30 crc kubenswrapper[4774]: I1121 15:39:30.979811 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-utilities" (OuterVolumeSpecName: "utilities") pod "532d29c6-e5da-402a-a382-81998f785537" (UID: "532d29c6-e5da-402a-a382-81998f785537"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:39:30 crc kubenswrapper[4774]: I1121 15:39:30.985029 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/532d29c6-e5da-402a-a382-81998f785537-kube-api-access-8rm2f" (OuterVolumeSpecName: "kube-api-access-8rm2f") pod "532d29c6-e5da-402a-a382-81998f785537" (UID: "532d29c6-e5da-402a-a382-81998f785537"). InnerVolumeSpecName "kube-api-access-8rm2f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.081507 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.081554 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rm2f\" (UniqueName: \"kubernetes.io/projected/532d29c6-e5da-402a-a382-81998f785537-kube-api-access-8rm2f\") on node \"crc\" DevicePath \"\"" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.406941 4774 generic.go:334] "Generic (PLEG): container finished" podID="532d29c6-e5da-402a-a382-81998f785537" containerID="b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803" exitCode=0 Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.406976 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5d8lb" event={"ID":"532d29c6-e5da-402a-a382-81998f785537","Type":"ContainerDied","Data":"b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803"} Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.407013 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5d8lb" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.407024 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5d8lb" event={"ID":"532d29c6-e5da-402a-a382-81998f785537","Type":"ContainerDied","Data":"43e9752b36fe6f955462c14ef58a91ea1e01d8dd8ebc55fc72b803c0cd64bb0d"} Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.407042 4774 scope.go:117] "RemoveContainer" containerID="b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.431880 4774 scope.go:117] "RemoveContainer" containerID="8a313baba45e546df992b61a9f7fc5915681884ad99ef7cc01e02d32772d67d3" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.453140 4774 scope.go:117] "RemoveContainer" containerID="82d3308a80ac26f00b43e144d4173c4403527c4f23535b5b3a61dfd217b7e32a" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.507765 4774 scope.go:117] "RemoveContainer" containerID="b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803" Nov 21 15:39:31 crc kubenswrapper[4774]: E1121 15:39:31.508283 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803\": container with ID starting with b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803 not found: ID does not exist" containerID="b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.508420 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803"} err="failed to get container status \"b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803\": rpc error: code = NotFound desc = could not find container \"b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803\": container with ID starting with b4341c8d4f66fef46a204caa203d0c5aea983368b448887a00d388ad19b6e803 not found: ID does not exist" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.508531 4774 scope.go:117] 
"RemoveContainer" containerID="8a313baba45e546df992b61a9f7fc5915681884ad99ef7cc01e02d32772d67d3" Nov 21 15:39:31 crc kubenswrapper[4774]: E1121 15:39:31.509086 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a313baba45e546df992b61a9f7fc5915681884ad99ef7cc01e02d32772d67d3\": container with ID starting with 8a313baba45e546df992b61a9f7fc5915681884ad99ef7cc01e02d32772d67d3 not found: ID does not exist" containerID="8a313baba45e546df992b61a9f7fc5915681884ad99ef7cc01e02d32772d67d3" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.509121 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a313baba45e546df992b61a9f7fc5915681884ad99ef7cc01e02d32772d67d3"} err="failed to get container status \"8a313baba45e546df992b61a9f7fc5915681884ad99ef7cc01e02d32772d67d3\": rpc error: code = NotFound desc = could not find container \"8a313baba45e546df992b61a9f7fc5915681884ad99ef7cc01e02d32772d67d3\": container with ID starting with 8a313baba45e546df992b61a9f7fc5915681884ad99ef7cc01e02d32772d67d3 not found: ID does not exist" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.509149 4774 scope.go:117] "RemoveContainer" containerID="82d3308a80ac26f00b43e144d4173c4403527c4f23535b5b3a61dfd217b7e32a" Nov 21 15:39:31 crc kubenswrapper[4774]: E1121 15:39:31.509564 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82d3308a80ac26f00b43e144d4173c4403527c4f23535b5b3a61dfd217b7e32a\": container with ID starting with 82d3308a80ac26f00b43e144d4173c4403527c4f23535b5b3a61dfd217b7e32a not found: ID does not exist" containerID="82d3308a80ac26f00b43e144d4173c4403527c4f23535b5b3a61dfd217b7e32a" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.509583 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82d3308a80ac26f00b43e144d4173c4403527c4f23535b5b3a61dfd217b7e32a"} err="failed to get container status \"82d3308a80ac26f00b43e144d4173c4403527c4f23535b5b3a61dfd217b7e32a\": rpc error: code = NotFound desc = could not find container \"82d3308a80ac26f00b43e144d4173c4403527c4f23535b5b3a61dfd217b7e32a\": container with ID starting with 82d3308a80ac26f00b43e144d4173c4403527c4f23535b5b3a61dfd217b7e32a not found: ID does not exist" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.896735 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "532d29c6-e5da-402a-a382-81998f785537" (UID: "532d29c6-e5da-402a-a382-81998f785537"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.897622 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-catalog-content\") pod \"532d29c6-e5da-402a-a382-81998f785537\" (UID: \"532d29c6-e5da-402a-a382-81998f785537\") " Nov 21 15:39:31 crc kubenswrapper[4774]: W1121 15:39:31.897731 4774 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/532d29c6-e5da-402a-a382-81998f785537/volumes/kubernetes.io~empty-dir/catalog-content Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.897751 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "532d29c6-e5da-402a-a382-81998f785537" (UID: "532d29c6-e5da-402a-a382-81998f785537"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:39:31 crc kubenswrapper[4774]: I1121 15:39:31.898391 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/532d29c6-e5da-402a-a382-81998f785537-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:39:32 crc kubenswrapper[4774]: I1121 15:39:32.049581 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5d8lb"] Nov 21 15:39:32 crc kubenswrapper[4774]: I1121 15:39:32.057934 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5d8lb"] Nov 21 15:39:32 crc kubenswrapper[4774]: I1121 15:39:32.119282 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="532d29c6-e5da-402a-a382-81998f785537" path="/var/lib/kubelet/pods/532d29c6-e5da-402a-a382-81998f785537/volumes" Nov 21 15:39:41 crc kubenswrapper[4774]: I1121 15:39:41.184949 4774 scope.go:117] "RemoveContainer" containerID="b0ff764da328ba11b7ef5f7d205475a9215fb64c167264937a5946b4631a2590" Nov 21 15:39:41 crc kubenswrapper[4774]: I1121 15:39:41.212883 4774 scope.go:117] "RemoveContainer" containerID="e297a837e5e94299d9704b18eebc5c3a74c3058ac7c711345c65ed0d252fcec2" Nov 21 15:39:59 crc kubenswrapper[4774]: I1121 15:39:59.600790 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:39:59 crc kubenswrapper[4774]: I1121 15:39:59.602799 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:40:28 crc kubenswrapper[4774]: I1121 15:40:28.070579 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-gnn8g"] Nov 21 15:40:28 crc kubenswrapper[4774]: I1121 15:40:28.086640 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-55b3-account-create-h5rkb"] Nov 21 15:40:28 crc kubenswrapper[4774]: I1121 15:40:28.112052 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/keystone-db-create-gnn8g"] Nov 21 15:40:28 crc kubenswrapper[4774]: I1121 15:40:28.113904 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-55b3-account-create-h5rkb"] Nov 21 15:40:29 crc kubenswrapper[4774]: I1121 15:40:29.600584 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:40:29 crc kubenswrapper[4774]: I1121 15:40:29.600935 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:40:30 crc kubenswrapper[4774]: I1121 15:40:30.108761 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fbd6161-8997-44a4-876a-92c03be70e1d" path="/var/lib/kubelet/pods/3fbd6161-8997-44a4-876a-92c03be70e1d/volumes" Nov 21 15:40:30 crc kubenswrapper[4774]: I1121 15:40:30.109331 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cdf432d-0afa-49af-8682-8080beee68cb" path="/var/lib/kubelet/pods/9cdf432d-0afa-49af-8682-8080beee68cb/volumes" Nov 21 15:40:33 crc kubenswrapper[4774]: I1121 15:40:33.035856 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-gwn8n"] Nov 21 15:40:33 crc kubenswrapper[4774]: I1121 15:40:33.050829 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-gwn8n"] Nov 21 15:40:34 crc kubenswrapper[4774]: I1121 15:40:34.104760 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f601dba9-4b07-49b9-87b7-16a991d8ea4a" path="/var/lib/kubelet/pods/f601dba9-4b07-49b9-87b7-16a991d8ea4a/volumes" Nov 21 15:40:41 crc kubenswrapper[4774]: I1121 15:40:41.314694 4774 scope.go:117] "RemoveContainer" containerID="497639a1b1b6c13089297e4c0b054b5c78d2298e97a2cf5b2172f5d18e00ced5" Nov 21 15:40:41 crc kubenswrapper[4774]: I1121 15:40:41.341650 4774 scope.go:117] "RemoveContainer" containerID="b1721b895dd6548a9989fd723fae8be44590c4ade9d06d79112dea7cb4dda75c" Nov 21 15:40:41 crc kubenswrapper[4774]: I1121 15:40:41.399198 4774 scope.go:117] "RemoveContainer" containerID="0f44471e9903af560b22becb34c09c17ca5f2151bfef4f2fc597c810c3510659" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.831027 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-mkddd"] Nov 21 15:40:42 crc kubenswrapper[4774]: E1121 15:40:42.831797 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="532d29c6-e5da-402a-a382-81998f785537" containerName="extract-content" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.831831 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="532d29c6-e5da-402a-a382-81998f785537" containerName="extract-content" Nov 21 15:40:42 crc kubenswrapper[4774]: E1121 15:40:42.831864 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="532d29c6-e5da-402a-a382-81998f785537" containerName="extract-utilities" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.831872 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="532d29c6-e5da-402a-a382-81998f785537" containerName="extract-utilities" Nov 21 15:40:42 crc 
kubenswrapper[4774]: E1121 15:40:42.831895 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="532d29c6-e5da-402a-a382-81998f785537" containerName="registry-server" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.831903 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="532d29c6-e5da-402a-a382-81998f785537" containerName="registry-server" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.832128 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="532d29c6-e5da-402a-a382-81998f785537" containerName="registry-server" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.833001 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mkddd" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.837261 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-5v69g" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.837542 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.849294 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mkddd"] Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.861471 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-svdd5"] Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.864157 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.878505 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-svdd5"] Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.947227 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-var-lib\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.947284 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-scripts\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.947308 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-scripts\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.947408 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-var-run\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.947459 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j65pz\" (UniqueName: 
\"kubernetes.io/projected/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-kube-api-access-j65pz\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.947518 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-etc-ovs\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.947540 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-var-run\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.947604 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8nkz\" (UniqueName: \"kubernetes.io/projected/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-kube-api-access-b8nkz\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.947646 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-var-log\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.947695 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-var-log-ovn\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:42 crc kubenswrapper[4774]: I1121 15:40:42.947763 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-var-run-ovn\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049226 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-var-lib\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049269 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-scripts\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049315 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-scripts\") pod \"ovn-controller-mkddd\" 
(UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049378 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-var-run\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049417 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j65pz\" (UniqueName: \"kubernetes.io/projected/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-kube-api-access-j65pz\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049472 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-etc-ovs\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049501 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-var-run\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049577 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8nkz\" (UniqueName: \"kubernetes.io/projected/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-kube-api-access-b8nkz\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049639 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-var-log\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049673 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-var-log-ovn\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049682 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-etc-ovs\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049664 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-var-run\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049789 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-var-log-ovn\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049795 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-var-run-ovn\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049801 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-var-log\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049885 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-var-run-ovn\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.049927 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-var-run\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.050043 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-var-lib\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.051242 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-scripts\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.052518 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-scripts\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.070668 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8nkz\" (UniqueName: \"kubernetes.io/projected/0b0db751-b2bd-4d68-a90c-c4c6e2b75216-kube-api-access-b8nkz\") pod \"ovn-controller-ovs-svdd5\" (UID: \"0b0db751-b2bd-4d68-a90c-c4c6e2b75216\") " pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.078806 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j65pz\" (UniqueName: \"kubernetes.io/projected/7e9568d8-f5a4-4bd2-93a2-08df43d611e7-kube-api-access-j65pz\") pod \"ovn-controller-mkddd\" (UID: \"7e9568d8-f5a4-4bd2-93a2-08df43d611e7\") " pod="openstack/ovn-controller-mkddd" Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 
Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.194897 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-svdd5"
Nov 21 15:40:43 crc kubenswrapper[4774]: I1121 15:40:43.674644 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mkddd"]
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.124876 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-svdd5"]
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.147077 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-svdd5" event={"ID":"0b0db751-b2bd-4d68-a90c-c4c6e2b75216","Type":"ContainerStarted","Data":"1d536fce4834ec527c5c5763388859868860684f8b31d84a5291c65a7e5810e4"}
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.148188 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mkddd" event={"ID":"7e9568d8-f5a4-4bd2-93a2-08df43d611e7","Type":"ContainerStarted","Data":"190039f881885cb42bb263f701563b11d2a5d5473c8031abc3263bdbf4c8ec61"}
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.148215 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mkddd" event={"ID":"7e9568d8-f5a4-4bd2-93a2-08df43d611e7","Type":"ContainerStarted","Data":"b9bc62f87b77c40a6a8df2cc59e8a027fb3e918075033f2ebbf24f440cc0e24e"}
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.148410 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-mkddd"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.169928 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-mkddd" podStartSLOduration=2.169909971 podStartE2EDuration="2.169909971s" podCreationTimestamp="2025-11-21 15:40:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:40:44.1656748 +0000 UTC m=+5834.817874069" watchObservedRunningTime="2025-11-21 15:40:44.169909971 +0000 UTC m=+5834.822109230"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.367178 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-htq55"]
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.374202 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.376573 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.383006 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-htq55"]
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.483002 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2284d243-017a-4de4-bdd1-5e5d05e56c92-config\") pod \"ovn-controller-metrics-htq55\" (UID: \"2284d243-017a-4de4-bdd1-5e5d05e56c92\") " pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.483070 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/2284d243-017a-4de4-bdd1-5e5d05e56c92-ovn-rundir\") pod \"ovn-controller-metrics-htq55\" (UID: \"2284d243-017a-4de4-bdd1-5e5d05e56c92\") " pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.483098 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/2284d243-017a-4de4-bdd1-5e5d05e56c92-ovs-rundir\") pod \"ovn-controller-metrics-htq55\" (UID: \"2284d243-017a-4de4-bdd1-5e5d05e56c92\") " pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.483122 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcs2s\" (UniqueName: \"kubernetes.io/projected/2284d243-017a-4de4-bdd1-5e5d05e56c92-kube-api-access-pcs2s\") pod \"ovn-controller-metrics-htq55\" (UID: \"2284d243-017a-4de4-bdd1-5e5d05e56c92\") " pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.584403 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2284d243-017a-4de4-bdd1-5e5d05e56c92-config\") pod \"ovn-controller-metrics-htq55\" (UID: \"2284d243-017a-4de4-bdd1-5e5d05e56c92\") " pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.584471 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/2284d243-017a-4de4-bdd1-5e5d05e56c92-ovn-rundir\") pod \"ovn-controller-metrics-htq55\" (UID: \"2284d243-017a-4de4-bdd1-5e5d05e56c92\") " pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.584497 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/2284d243-017a-4de4-bdd1-5e5d05e56c92-ovs-rundir\") pod \"ovn-controller-metrics-htq55\" (UID: \"2284d243-017a-4de4-bdd1-5e5d05e56c92\") " pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.584772 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/2284d243-017a-4de4-bdd1-5e5d05e56c92-ovs-rundir\") pod \"ovn-controller-metrics-htq55\" (UID: \"2284d243-017a-4de4-bdd1-5e5d05e56c92\") " pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.584518 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcs2s\" (UniqueName: \"kubernetes.io/projected/2284d243-017a-4de4-bdd1-5e5d05e56c92-kube-api-access-pcs2s\") pod \"ovn-controller-metrics-htq55\" (UID: \"2284d243-017a-4de4-bdd1-5e5d05e56c92\") " pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.584799 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/2284d243-017a-4de4-bdd1-5e5d05e56c92-ovn-rundir\") pod \"ovn-controller-metrics-htq55\" (UID: \"2284d243-017a-4de4-bdd1-5e5d05e56c92\") " pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.585363 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2284d243-017a-4de4-bdd1-5e5d05e56c92-config\") pod \"ovn-controller-metrics-htq55\" (UID: \"2284d243-017a-4de4-bdd1-5e5d05e56c92\") " pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.603608 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcs2s\" (UniqueName: \"kubernetes.io/projected/2284d243-017a-4de4-bdd1-5e5d05e56c92-kube-api-access-pcs2s\") pod \"ovn-controller-metrics-htq55\" (UID: \"2284d243-017a-4de4-bdd1-5e5d05e56c92\") " pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:44 crc kubenswrapper[4774]: I1121 15:40:44.694074 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-htq55"
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.162236 4774 generic.go:334] "Generic (PLEG): container finished" podID="0b0db751-b2bd-4d68-a90c-c4c6e2b75216" containerID="90834f01329aef4ea6e1531c62ecd5e364b6114fe806eba9a89f429e2471eb3c" exitCode=0
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.162384 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-svdd5" event={"ID":"0b0db751-b2bd-4d68-a90c-c4c6e2b75216","Type":"ContainerDied","Data":"90834f01329aef4ea6e1531c62ecd5e364b6114fe806eba9a89f429e2471eb3c"}
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.218705 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-htq55"]
Nov 21 15:40:45 crc kubenswrapper[4774]: W1121 15:40:45.237319 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2284d243_017a_4de4_bdd1_5e5d05e56c92.slice/crio-91f070112717012f60991e2ec07d5e76338cfee699a0aec35de7f43efc4dc9ab WatchSource:0}: Error finding container 91f070112717012f60991e2ec07d5e76338cfee699a0aec35de7f43efc4dc9ab: Status 404 returned error can't find the container with id 91f070112717012f60991e2ec07d5e76338cfee699a0aec35de7f43efc4dc9ab
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.348932 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-create-9cpf9"]
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.350934 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-9cpf9"
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.358355 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-9cpf9"]
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.505698 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87c9a523-0280-46a5-98da-f9de20a6fbd1-operator-scripts\") pod \"octavia-db-create-9cpf9\" (UID: \"87c9a523-0280-46a5-98da-f9de20a6fbd1\") " pod="openstack/octavia-db-create-9cpf9"
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.505804 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sqhv\" (UniqueName: \"kubernetes.io/projected/87c9a523-0280-46a5-98da-f9de20a6fbd1-kube-api-access-8sqhv\") pod \"octavia-db-create-9cpf9\" (UID: \"87c9a523-0280-46a5-98da-f9de20a6fbd1\") " pod="openstack/octavia-db-create-9cpf9"
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.616952 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87c9a523-0280-46a5-98da-f9de20a6fbd1-operator-scripts\") pod \"octavia-db-create-9cpf9\" (UID: \"87c9a523-0280-46a5-98da-f9de20a6fbd1\") " pod="openstack/octavia-db-create-9cpf9"
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.617390 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sqhv\" (UniqueName: \"kubernetes.io/projected/87c9a523-0280-46a5-98da-f9de20a6fbd1-kube-api-access-8sqhv\") pod \"octavia-db-create-9cpf9\" (UID: \"87c9a523-0280-46a5-98da-f9de20a6fbd1\") " pod="openstack/octavia-db-create-9cpf9"
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.617858 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87c9a523-0280-46a5-98da-f9de20a6fbd1-operator-scripts\") pod \"octavia-db-create-9cpf9\" (UID: \"87c9a523-0280-46a5-98da-f9de20a6fbd1\") " pod="openstack/octavia-db-create-9cpf9"
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.656672 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sqhv\" (UniqueName: \"kubernetes.io/projected/87c9a523-0280-46a5-98da-f9de20a6fbd1-kube-api-access-8sqhv\") pod \"octavia-db-create-9cpf9\" (UID: \"87c9a523-0280-46a5-98da-f9de20a6fbd1\") " pod="openstack/octavia-db-create-9cpf9"
Nov 21 15:40:45 crc kubenswrapper[4774]: I1121 15:40:45.709443 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-create-9cpf9"
Nov 21 15:40:46 crc kubenswrapper[4774]: I1121 15:40:46.190932 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-svdd5" event={"ID":"0b0db751-b2bd-4d68-a90c-c4c6e2b75216","Type":"ContainerStarted","Data":"2498b55619737ff9ff0f47a6dd2fbbe89b2c3a615da665acd90ffab0d2fc9bfd"}
Nov 21 15:40:46 crc kubenswrapper[4774]: I1121 15:40:46.191404 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-svdd5" event={"ID":"0b0db751-b2bd-4d68-a90c-c4c6e2b75216","Type":"ContainerStarted","Data":"736f0e41f5ecd25d839e885bfa8139d79c784672bc37c8fb65551f67b6cbb0fb"}
Nov 21 15:40:46 crc kubenswrapper[4774]: I1121 15:40:46.191439 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-svdd5"
Nov 21 15:40:46 crc kubenswrapper[4774]: I1121 15:40:46.191457 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-svdd5"
Nov 21 15:40:46 crc kubenswrapper[4774]: I1121 15:40:46.193301 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-htq55" event={"ID":"2284d243-017a-4de4-bdd1-5e5d05e56c92","Type":"ContainerStarted","Data":"2c993ba5ae905ebfb7c7a4699d9d61d6ce6d0c2b0b7c73ea95acfba06c870d3a"}
Nov 21 15:40:46 crc kubenswrapper[4774]: I1121 15:40:46.193346 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-htq55" event={"ID":"2284d243-017a-4de4-bdd1-5e5d05e56c92","Type":"ContainerStarted","Data":"91f070112717012f60991e2ec07d5e76338cfee699a0aec35de7f43efc4dc9ab"}
Nov 21 15:40:46 crc kubenswrapper[4774]: I1121 15:40:46.226455 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-svdd5" podStartSLOduration=4.226426525 podStartE2EDuration="4.226426525s" podCreationTimestamp="2025-11-21 15:40:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:40:46.212973062 +0000 UTC m=+5836.865172321" watchObservedRunningTime="2025-11-21 15:40:46.226426525 +0000 UTC m=+5836.878625784"
Nov 21 15:40:46 crc kubenswrapper[4774]: I1121 15:40:46.246958 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-htq55" podStartSLOduration=2.24693155 podStartE2EDuration="2.24693155s" podCreationTimestamp="2025-11-21 15:40:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:40:46.229136742 +0000 UTC m=+5836.881336021" watchObservedRunningTime="2025-11-21 15:40:46.24693155 +0000 UTC m=+5836.899130809"
Nov 21 15:40:46 crc kubenswrapper[4774]: I1121 15:40:46.318438 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-create-9cpf9"]
Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.047279 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-tt7dv"]
Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.071028 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-tt7dv"]
Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.164683 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-666b-account-create-4zsxl"]
Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.166105 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-666b-account-create-4zsxl" Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.169993 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-db-secret" Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.175101 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-666b-account-create-4zsxl"] Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.225451 4774 generic.go:334] "Generic (PLEG): container finished" podID="87c9a523-0280-46a5-98da-f9de20a6fbd1" containerID="7a79cf03168020ffbb4bd320527ba574d937e4645bb7ef5d973921a0c49b91b1" exitCode=0 Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.225748 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-9cpf9" event={"ID":"87c9a523-0280-46a5-98da-f9de20a6fbd1","Type":"ContainerDied","Data":"7a79cf03168020ffbb4bd320527ba574d937e4645bb7ef5d973921a0c49b91b1"} Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.225830 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-9cpf9" event={"ID":"87c9a523-0280-46a5-98da-f9de20a6fbd1","Type":"ContainerStarted","Data":"d0a13e46bb6bfcbc661af04a8bc7d0f9e19180f91238228295203372cfd6c8c1"} Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.250669 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56j9b\" (UniqueName: \"kubernetes.io/projected/a1c02231-5d57-41bf-927b-e4443ef9bc99-kube-api-access-56j9b\") pod \"octavia-666b-account-create-4zsxl\" (UID: \"a1c02231-5d57-41bf-927b-e4443ef9bc99\") " pod="openstack/octavia-666b-account-create-4zsxl" Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.250740 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1c02231-5d57-41bf-927b-e4443ef9bc99-operator-scripts\") pod \"octavia-666b-account-create-4zsxl\" (UID: \"a1c02231-5d57-41bf-927b-e4443ef9bc99\") " pod="openstack/octavia-666b-account-create-4zsxl" Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.353239 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56j9b\" (UniqueName: \"kubernetes.io/projected/a1c02231-5d57-41bf-927b-e4443ef9bc99-kube-api-access-56j9b\") pod \"octavia-666b-account-create-4zsxl\" (UID: \"a1c02231-5d57-41bf-927b-e4443ef9bc99\") " pod="openstack/octavia-666b-account-create-4zsxl" Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.353323 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1c02231-5d57-41bf-927b-e4443ef9bc99-operator-scripts\") pod \"octavia-666b-account-create-4zsxl\" (UID: \"a1c02231-5d57-41bf-927b-e4443ef9bc99\") " pod="openstack/octavia-666b-account-create-4zsxl" Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.354164 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1c02231-5d57-41bf-927b-e4443ef9bc99-operator-scripts\") pod \"octavia-666b-account-create-4zsxl\" (UID: \"a1c02231-5d57-41bf-927b-e4443ef9bc99\") " pod="openstack/octavia-666b-account-create-4zsxl" Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.382180 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56j9b\" (UniqueName: 
\"kubernetes.io/projected/a1c02231-5d57-41bf-927b-e4443ef9bc99-kube-api-access-56j9b\") pod \"octavia-666b-account-create-4zsxl\" (UID: \"a1c02231-5d57-41bf-927b-e4443ef9bc99\") " pod="openstack/octavia-666b-account-create-4zsxl" Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.486112 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-666b-account-create-4zsxl" Nov 21 15:40:47 crc kubenswrapper[4774]: W1121 15:40:47.960624 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1c02231_5d57_41bf_927b_e4443ef9bc99.slice/crio-84889c057076f1f2009ea71512a0dc461e8b510939aa5cc52431a0c1ce2de6cd WatchSource:0}: Error finding container 84889c057076f1f2009ea71512a0dc461e8b510939aa5cc52431a0c1ce2de6cd: Status 404 returned error can't find the container with id 84889c057076f1f2009ea71512a0dc461e8b510939aa5cc52431a0c1ce2de6cd Nov 21 15:40:47 crc kubenswrapper[4774]: I1121 15:40:47.961525 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-666b-account-create-4zsxl"] Nov 21 15:40:48 crc kubenswrapper[4774]: I1121 15:40:48.103976 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9101eeef-82b1-42af-821d-6061aa431bef" path="/var/lib/kubelet/pods/9101eeef-82b1-42af-821d-6061aa431bef/volumes" Nov 21 15:40:48 crc kubenswrapper[4774]: I1121 15:40:48.240382 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-666b-account-create-4zsxl" event={"ID":"a1c02231-5d57-41bf-927b-e4443ef9bc99","Type":"ContainerStarted","Data":"0a5e279aefefc9363d161fa1b6db5f20a23dfaf7e2fb0ccba5539c7d70c056b3"} Nov 21 15:40:48 crc kubenswrapper[4774]: I1121 15:40:48.240434 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-666b-account-create-4zsxl" event={"ID":"a1c02231-5d57-41bf-927b-e4443ef9bc99","Type":"ContainerStarted","Data":"84889c057076f1f2009ea71512a0dc461e8b510939aa5cc52431a0c1ce2de6cd"} Nov 21 15:40:48 crc kubenswrapper[4774]: I1121 15:40:48.259538 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-666b-account-create-4zsxl" podStartSLOduration=1.259519491 podStartE2EDuration="1.259519491s" podCreationTimestamp="2025-11-21 15:40:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:40:48.257215185 +0000 UTC m=+5838.909414444" watchObservedRunningTime="2025-11-21 15:40:48.259519491 +0000 UTC m=+5838.911718750" Nov 21 15:40:48 crc kubenswrapper[4774]: I1121 15:40:48.615322 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-9cpf9" Nov 21 15:40:48 crc kubenswrapper[4774]: I1121 15:40:48.682616 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87c9a523-0280-46a5-98da-f9de20a6fbd1-operator-scripts\") pod \"87c9a523-0280-46a5-98da-f9de20a6fbd1\" (UID: \"87c9a523-0280-46a5-98da-f9de20a6fbd1\") " Nov 21 15:40:48 crc kubenswrapper[4774]: I1121 15:40:48.682920 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sqhv\" (UniqueName: \"kubernetes.io/projected/87c9a523-0280-46a5-98da-f9de20a6fbd1-kube-api-access-8sqhv\") pod \"87c9a523-0280-46a5-98da-f9de20a6fbd1\" (UID: \"87c9a523-0280-46a5-98da-f9de20a6fbd1\") " Nov 21 15:40:48 crc kubenswrapper[4774]: I1121 15:40:48.684112 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87c9a523-0280-46a5-98da-f9de20a6fbd1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "87c9a523-0280-46a5-98da-f9de20a6fbd1" (UID: "87c9a523-0280-46a5-98da-f9de20a6fbd1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:40:48 crc kubenswrapper[4774]: I1121 15:40:48.693342 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87c9a523-0280-46a5-98da-f9de20a6fbd1-kube-api-access-8sqhv" (OuterVolumeSpecName: "kube-api-access-8sqhv") pod "87c9a523-0280-46a5-98da-f9de20a6fbd1" (UID: "87c9a523-0280-46a5-98da-f9de20a6fbd1"). InnerVolumeSpecName "kube-api-access-8sqhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:40:48 crc kubenswrapper[4774]: I1121 15:40:48.785842 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87c9a523-0280-46a5-98da-f9de20a6fbd1-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:40:48 crc kubenswrapper[4774]: I1121 15:40:48.785872 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sqhv\" (UniqueName: \"kubernetes.io/projected/87c9a523-0280-46a5-98da-f9de20a6fbd1-kube-api-access-8sqhv\") on node \"crc\" DevicePath \"\"" Nov 21 15:40:49 crc kubenswrapper[4774]: I1121 15:40:49.252764 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-create-9cpf9" event={"ID":"87c9a523-0280-46a5-98da-f9de20a6fbd1","Type":"ContainerDied","Data":"d0a13e46bb6bfcbc661af04a8bc7d0f9e19180f91238228295203372cfd6c8c1"} Nov 21 15:40:49 crc kubenswrapper[4774]: I1121 15:40:49.253055 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0a13e46bb6bfcbc661af04a8bc7d0f9e19180f91238228295203372cfd6c8c1" Nov 21 15:40:49 crc kubenswrapper[4774]: I1121 15:40:49.252870 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-create-9cpf9" Nov 21 15:40:49 crc kubenswrapper[4774]: I1121 15:40:49.255439 4774 generic.go:334] "Generic (PLEG): container finished" podID="a1c02231-5d57-41bf-927b-e4443ef9bc99" containerID="0a5e279aefefc9363d161fa1b6db5f20a23dfaf7e2fb0ccba5539c7d70c056b3" exitCode=0 Nov 21 15:40:49 crc kubenswrapper[4774]: I1121 15:40:49.255471 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-666b-account-create-4zsxl" event={"ID":"a1c02231-5d57-41bf-927b-e4443ef9bc99","Type":"ContainerDied","Data":"0a5e279aefefc9363d161fa1b6db5f20a23dfaf7e2fb0ccba5539c7d70c056b3"} Nov 21 15:40:50 crc kubenswrapper[4774]: I1121 15:40:50.644915 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-666b-account-create-4zsxl" Nov 21 15:40:50 crc kubenswrapper[4774]: I1121 15:40:50.721547 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56j9b\" (UniqueName: \"kubernetes.io/projected/a1c02231-5d57-41bf-927b-e4443ef9bc99-kube-api-access-56j9b\") pod \"a1c02231-5d57-41bf-927b-e4443ef9bc99\" (UID: \"a1c02231-5d57-41bf-927b-e4443ef9bc99\") " Nov 21 15:40:50 crc kubenswrapper[4774]: I1121 15:40:50.721678 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1c02231-5d57-41bf-927b-e4443ef9bc99-operator-scripts\") pod \"a1c02231-5d57-41bf-927b-e4443ef9bc99\" (UID: \"a1c02231-5d57-41bf-927b-e4443ef9bc99\") " Nov 21 15:40:50 crc kubenswrapper[4774]: I1121 15:40:50.722040 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1c02231-5d57-41bf-927b-e4443ef9bc99-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a1c02231-5d57-41bf-927b-e4443ef9bc99" (UID: "a1c02231-5d57-41bf-927b-e4443ef9bc99"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:40:50 crc kubenswrapper[4774]: I1121 15:40:50.722221 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a1c02231-5d57-41bf-927b-e4443ef9bc99-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:40:50 crc kubenswrapper[4774]: I1121 15:40:50.725928 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1c02231-5d57-41bf-927b-e4443ef9bc99-kube-api-access-56j9b" (OuterVolumeSpecName: "kube-api-access-56j9b") pod "a1c02231-5d57-41bf-927b-e4443ef9bc99" (UID: "a1c02231-5d57-41bf-927b-e4443ef9bc99"). InnerVolumeSpecName "kube-api-access-56j9b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:40:50 crc kubenswrapper[4774]: I1121 15:40:50.823677 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56j9b\" (UniqueName: \"kubernetes.io/projected/a1c02231-5d57-41bf-927b-e4443ef9bc99-kube-api-access-56j9b\") on node \"crc\" DevicePath \"\"" Nov 21 15:40:51 crc kubenswrapper[4774]: I1121 15:40:51.277704 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-666b-account-create-4zsxl" event={"ID":"a1c02231-5d57-41bf-927b-e4443ef9bc99","Type":"ContainerDied","Data":"84889c057076f1f2009ea71512a0dc461e8b510939aa5cc52431a0c1ce2de6cd"} Nov 21 15:40:51 crc kubenswrapper[4774]: I1121 15:40:51.277755 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84889c057076f1f2009ea71512a0dc461e8b510939aa5cc52431a0c1ce2de6cd" Nov 21 15:40:51 crc kubenswrapper[4774]: I1121 15:40:51.277764 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-666b-account-create-4zsxl" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.240078 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-persistence-db-create-clqtq"] Nov 21 15:40:53 crc kubenswrapper[4774]: E1121 15:40:53.240848 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87c9a523-0280-46a5-98da-f9de20a6fbd1" containerName="mariadb-database-create" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.240864 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="87c9a523-0280-46a5-98da-f9de20a6fbd1" containerName="mariadb-database-create" Nov 21 15:40:53 crc kubenswrapper[4774]: E1121 15:40:53.240885 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1c02231-5d57-41bf-927b-e4443ef9bc99" containerName="mariadb-account-create" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.240893 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1c02231-5d57-41bf-927b-e4443ef9bc99" containerName="mariadb-account-create" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.241124 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1c02231-5d57-41bf-927b-e4443ef9bc99" containerName="mariadb-account-create" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.241144 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="87c9a523-0280-46a5-98da-f9de20a6fbd1" containerName="mariadb-database-create" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.241925 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-clqtq" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.248474 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-clqtq"] Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.375191 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f37cd5cb-111d-4928-9348-d029977285e6-operator-scripts\") pod \"octavia-persistence-db-create-clqtq\" (UID: \"f37cd5cb-111d-4928-9348-d029977285e6\") " pod="openstack/octavia-persistence-db-create-clqtq" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.375245 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4zp9\" (UniqueName: \"kubernetes.io/projected/f37cd5cb-111d-4928-9348-d029977285e6-kube-api-access-d4zp9\") pod \"octavia-persistence-db-create-clqtq\" (UID: \"f37cd5cb-111d-4928-9348-d029977285e6\") " pod="openstack/octavia-persistence-db-create-clqtq" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.477051 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f37cd5cb-111d-4928-9348-d029977285e6-operator-scripts\") pod \"octavia-persistence-db-create-clqtq\" (UID: \"f37cd5cb-111d-4928-9348-d029977285e6\") " pod="openstack/octavia-persistence-db-create-clqtq" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.477133 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4zp9\" (UniqueName: \"kubernetes.io/projected/f37cd5cb-111d-4928-9348-d029977285e6-kube-api-access-d4zp9\") pod \"octavia-persistence-db-create-clqtq\" (UID: \"f37cd5cb-111d-4928-9348-d029977285e6\") " pod="openstack/octavia-persistence-db-create-clqtq" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.477916 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f37cd5cb-111d-4928-9348-d029977285e6-operator-scripts\") pod \"octavia-persistence-db-create-clqtq\" (UID: \"f37cd5cb-111d-4928-9348-d029977285e6\") " pod="openstack/octavia-persistence-db-create-clqtq" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.506643 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4zp9\" (UniqueName: \"kubernetes.io/projected/f37cd5cb-111d-4928-9348-d029977285e6-kube-api-access-d4zp9\") pod \"octavia-persistence-db-create-clqtq\" (UID: \"f37cd5cb-111d-4928-9348-d029977285e6\") " pod="openstack/octavia-persistence-db-create-clqtq" Nov 21 15:40:53 crc kubenswrapper[4774]: I1121 15:40:53.559196 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-clqtq" Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.040622 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-persistence-db-create-clqtq"] Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.305889 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-clqtq" event={"ID":"f37cd5cb-111d-4928-9348-d029977285e6","Type":"ContainerStarted","Data":"b9928033a3d501999626c0f84cedf85c94e2622480d8a399e5039c1dc827c3d4"} Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.305935 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-clqtq" event={"ID":"f37cd5cb-111d-4928-9348-d029977285e6","Type":"ContainerStarted","Data":"465644f68da4d757f650e95ede8d08aa60498fcfa802b562bd47c004c4787498"} Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.326227 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-persistence-db-create-clqtq" podStartSLOduration=1.32620584 podStartE2EDuration="1.32620584s" podCreationTimestamp="2025-11-21 15:40:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:40:54.321923908 +0000 UTC m=+5844.974123187" watchObservedRunningTime="2025-11-21 15:40:54.32620584 +0000 UTC m=+5844.978405099" Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.418307 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-8b7b-account-create-pmwb2"] Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.419655 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-8b7b-account-create-pmwb2" Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.421553 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-persistence-db-secret" Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.427139 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-8b7b-account-create-pmwb2"] Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.505438 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197-operator-scripts\") pod \"octavia-8b7b-account-create-pmwb2\" (UID: \"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197\") " pod="openstack/octavia-8b7b-account-create-pmwb2" Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.505754 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wb2qn\" (UniqueName: \"kubernetes.io/projected/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197-kube-api-access-wb2qn\") pod \"octavia-8b7b-account-create-pmwb2\" (UID: \"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197\") " pod="openstack/octavia-8b7b-account-create-pmwb2" Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.608060 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wb2qn\" (UniqueName: \"kubernetes.io/projected/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197-kube-api-access-wb2qn\") pod \"octavia-8b7b-account-create-pmwb2\" (UID: \"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197\") " pod="openstack/octavia-8b7b-account-create-pmwb2" Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.608248 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197-operator-scripts\") pod \"octavia-8b7b-account-create-pmwb2\" (UID: \"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197\") " pod="openstack/octavia-8b7b-account-create-pmwb2" Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.609359 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197-operator-scripts\") pod \"octavia-8b7b-account-create-pmwb2\" (UID: \"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197\") " pod="openstack/octavia-8b7b-account-create-pmwb2" Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.640778 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wb2qn\" (UniqueName: \"kubernetes.io/projected/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197-kube-api-access-wb2qn\") pod \"octavia-8b7b-account-create-pmwb2\" (UID: \"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197\") " pod="openstack/octavia-8b7b-account-create-pmwb2" Nov 21 15:40:54 crc kubenswrapper[4774]: I1121 15:40:54.770188 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-8b7b-account-create-pmwb2" Nov 21 15:40:55 crc kubenswrapper[4774]: I1121 15:40:55.225210 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-8b7b-account-create-pmwb2"] Nov 21 15:40:55 crc kubenswrapper[4774]: I1121 15:40:55.329303 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-8b7b-account-create-pmwb2" event={"ID":"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197","Type":"ContainerStarted","Data":"4f4d40ececd35bd68305ce578204b4d0cfc3c81b813506fc3e4d173f42b0c4aa"} Nov 21 15:40:55 crc kubenswrapper[4774]: I1121 15:40:55.331450 4774 generic.go:334] "Generic (PLEG): container finished" podID="f37cd5cb-111d-4928-9348-d029977285e6" containerID="b9928033a3d501999626c0f84cedf85c94e2622480d8a399e5039c1dc827c3d4" exitCode=0 Nov 21 15:40:55 crc kubenswrapper[4774]: I1121 15:40:55.331505 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-clqtq" event={"ID":"f37cd5cb-111d-4928-9348-d029977285e6","Type":"ContainerDied","Data":"b9928033a3d501999626c0f84cedf85c94e2622480d8a399e5039c1dc827c3d4"} Nov 21 15:40:56 crc kubenswrapper[4774]: I1121 15:40:56.341926 4774 generic.go:334] "Generic (PLEG): container finished" podID="b6f9b0d7-bd7c-4b92-8b65-f8c063e09197" containerID="62c95dd6def82acb076fbf5bfa8a700a9e11539a07617b496f40292f855c385d" exitCode=0 Nov 21 15:40:56 crc kubenswrapper[4774]: I1121 15:40:56.342024 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-8b7b-account-create-pmwb2" event={"ID":"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197","Type":"ContainerDied","Data":"62c95dd6def82acb076fbf5bfa8a700a9e11539a07617b496f40292f855c385d"} Nov 21 15:40:56 crc kubenswrapper[4774]: I1121 15:40:56.689187 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-persistence-db-create-clqtq" Nov 21 15:40:56 crc kubenswrapper[4774]: I1121 15:40:56.853095 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f37cd5cb-111d-4928-9348-d029977285e6-operator-scripts\") pod \"f37cd5cb-111d-4928-9348-d029977285e6\" (UID: \"f37cd5cb-111d-4928-9348-d029977285e6\") " Nov 21 15:40:56 crc kubenswrapper[4774]: I1121 15:40:56.853287 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4zp9\" (UniqueName: \"kubernetes.io/projected/f37cd5cb-111d-4928-9348-d029977285e6-kube-api-access-d4zp9\") pod \"f37cd5cb-111d-4928-9348-d029977285e6\" (UID: \"f37cd5cb-111d-4928-9348-d029977285e6\") " Nov 21 15:40:56 crc kubenswrapper[4774]: I1121 15:40:56.853782 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f37cd5cb-111d-4928-9348-d029977285e6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f37cd5cb-111d-4928-9348-d029977285e6" (UID: "f37cd5cb-111d-4928-9348-d029977285e6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:40:56 crc kubenswrapper[4774]: I1121 15:40:56.854172 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f37cd5cb-111d-4928-9348-d029977285e6-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:40:56 crc kubenswrapper[4774]: I1121 15:40:56.857965 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f37cd5cb-111d-4928-9348-d029977285e6-kube-api-access-d4zp9" (OuterVolumeSpecName: "kube-api-access-d4zp9") pod "f37cd5cb-111d-4928-9348-d029977285e6" (UID: "f37cd5cb-111d-4928-9348-d029977285e6"). InnerVolumeSpecName "kube-api-access-d4zp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:40:56 crc kubenswrapper[4774]: I1121 15:40:56.956496 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4zp9\" (UniqueName: \"kubernetes.io/projected/f37cd5cb-111d-4928-9348-d029977285e6-kube-api-access-d4zp9\") on node \"crc\" DevicePath \"\"" Nov 21 15:40:57 crc kubenswrapper[4774]: I1121 15:40:57.354371 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-persistence-db-create-clqtq" Nov 21 15:40:57 crc kubenswrapper[4774]: I1121 15:40:57.354360 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-persistence-db-create-clqtq" event={"ID":"f37cd5cb-111d-4928-9348-d029977285e6","Type":"ContainerDied","Data":"465644f68da4d757f650e95ede8d08aa60498fcfa802b562bd47c004c4787498"} Nov 21 15:40:57 crc kubenswrapper[4774]: I1121 15:40:57.354448 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="465644f68da4d757f650e95ede8d08aa60498fcfa802b562bd47c004c4787498" Nov 21 15:40:57 crc kubenswrapper[4774]: I1121 15:40:57.723918 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-8b7b-account-create-pmwb2" Nov 21 15:40:57 crc kubenswrapper[4774]: I1121 15:40:57.873489 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197-operator-scripts\") pod \"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197\" (UID: \"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197\") " Nov 21 15:40:57 crc kubenswrapper[4774]: I1121 15:40:57.873744 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wb2qn\" (UniqueName: \"kubernetes.io/projected/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197-kube-api-access-wb2qn\") pod \"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197\" (UID: \"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197\") " Nov 21 15:40:57 crc kubenswrapper[4774]: I1121 15:40:57.874033 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b6f9b0d7-bd7c-4b92-8b65-f8c063e09197" (UID: "b6f9b0d7-bd7c-4b92-8b65-f8c063e09197"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:40:57 crc kubenswrapper[4774]: I1121 15:40:57.874675 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:40:57 crc kubenswrapper[4774]: I1121 15:40:57.877684 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197-kube-api-access-wb2qn" (OuterVolumeSpecName: "kube-api-access-wb2qn") pod "b6f9b0d7-bd7c-4b92-8b65-f8c063e09197" (UID: "b6f9b0d7-bd7c-4b92-8b65-f8c063e09197"). InnerVolumeSpecName "kube-api-access-wb2qn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:40:57 crc kubenswrapper[4774]: I1121 15:40:57.977452 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wb2qn\" (UniqueName: \"kubernetes.io/projected/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197-kube-api-access-wb2qn\") on node \"crc\" DevicePath \"\"" Nov 21 15:40:58 crc kubenswrapper[4774]: I1121 15:40:58.364832 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-8b7b-account-create-pmwb2" event={"ID":"b6f9b0d7-bd7c-4b92-8b65-f8c063e09197","Type":"ContainerDied","Data":"4f4d40ececd35bd68305ce578204b4d0cfc3c81b813506fc3e4d173f42b0c4aa"} Nov 21 15:40:58 crc kubenswrapper[4774]: I1121 15:40:58.364886 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f4d40ececd35bd68305ce578204b4d0cfc3c81b813506fc3e4d173f42b0c4aa" Nov 21 15:40:58 crc kubenswrapper[4774]: I1121 15:40:58.364917 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-8b7b-account-create-pmwb2" Nov 21 15:40:59 crc kubenswrapper[4774]: I1121 15:40:59.601314 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:40:59 crc kubenswrapper[4774]: I1121 15:40:59.601733 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:40:59 crc kubenswrapper[4774]: I1121 15:40:59.601804 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 15:40:59 crc kubenswrapper[4774]: I1121 15:40:59.603019 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 15:40:59 crc kubenswrapper[4774]: I1121 15:40:59.603130 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" gracePeriod=600 Nov 21 15:40:59 crc kubenswrapper[4774]: E1121 15:40:59.730400 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.397851 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" exitCode=0 Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.398208 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70"} Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.398252 4774 scope.go:117] "RemoveContainer" containerID="4849e8220ea36f19f58def5dba0778aa648235f180867dd4feddda2e2ae19099" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.399016 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:41:00 crc kubenswrapper[4774]: E1121 15:41:00.399309 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.431446 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-api-58f57bccc4-t26gh"] Nov 21 15:41:00 crc kubenswrapper[4774]: E1121 15:41:00.431933 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6f9b0d7-bd7c-4b92-8b65-f8c063e09197" containerName="mariadb-account-create" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.431954 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6f9b0d7-bd7c-4b92-8b65-f8c063e09197" containerName="mariadb-account-create" Nov 21 15:41:00 crc kubenswrapper[4774]: E1121 15:41:00.431970 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f37cd5cb-111d-4928-9348-d029977285e6" containerName="mariadb-database-create" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.431978 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f37cd5cb-111d-4928-9348-d029977285e6" containerName="mariadb-database-create" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.432243 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f37cd5cb-111d-4928-9348-d029977285e6" containerName="mariadb-database-create" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.432266 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6f9b0d7-bd7c-4b92-8b65-f8c063e09197" containerName="mariadb-account-create" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.434001 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.440296 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-config-data" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.440917 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-octavia-dockercfg-24wsw" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.441071 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-api-scripts" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.450003 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-58f57bccc4-t26gh"] Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.538474 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-scripts\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.538585 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-octavia-run\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.538643 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: 
\"kubernetes.io/empty-dir/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-config-data-merged\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.538809 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-combined-ca-bundle\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.538857 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-config-data\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.640962 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-combined-ca-bundle\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.641016 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-config-data\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.641050 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-scripts\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.641110 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"octavia-run\" (UniqueName: \"kubernetes.io/empty-dir/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-octavia-run\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.641140 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-config-data-merged\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.641597 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-config-data-merged\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.642211 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"octavia-run\" (UniqueName: 
\"kubernetes.io/empty-dir/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-octavia-run\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.647600 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-config-data\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.647614 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-scripts\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.647669 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d7b8c2e-d524-42cf-bb67-3fb652fa55a5-combined-ca-bundle\") pod \"octavia-api-58f57bccc4-t26gh\" (UID: \"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5\") " pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:00 crc kubenswrapper[4774]: I1121 15:41:00.763257 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:01 crc kubenswrapper[4774]: W1121 15:41:01.243440 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d7b8c2e_d524_42cf_bb67_3fb652fa55a5.slice/crio-c4c6032d45473e9614e0eabd1df777d63eada06a63c6c6a562423fd1cb031462 WatchSource:0}: Error finding container c4c6032d45473e9614e0eabd1df777d63eada06a63c6c6a562423fd1cb031462: Status 404 returned error can't find the container with id c4c6032d45473e9614e0eabd1df777d63eada06a63c6c6a562423fd1cb031462 Nov 21 15:41:01 crc kubenswrapper[4774]: I1121 15:41:01.249552 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-api-58f57bccc4-t26gh"] Nov 21 15:41:01 crc kubenswrapper[4774]: I1121 15:41:01.408768 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-58f57bccc4-t26gh" event={"ID":"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5","Type":"ContainerStarted","Data":"c4c6032d45473e9614e0eabd1df777d63eada06a63c6c6a562423fd1cb031462"} Nov 21 15:41:10 crc kubenswrapper[4774]: I1121 15:41:10.497369 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-58f57bccc4-t26gh" event={"ID":"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5","Type":"ContainerStarted","Data":"a50685615a2c30c65157239f5eba443bb2212b6c90867d00f36186f7ec29437e"} Nov 21 15:41:11 crc kubenswrapper[4774]: I1121 15:41:11.509948 4774 generic.go:334] "Generic (PLEG): container finished" podID="7d7b8c2e-d524-42cf-bb67-3fb652fa55a5" containerID="a50685615a2c30c65157239f5eba443bb2212b6c90867d00f36186f7ec29437e" exitCode=0 Nov 21 15:41:11 crc kubenswrapper[4774]: I1121 15:41:11.510528 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-58f57bccc4-t26gh" event={"ID":"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5","Type":"ContainerDied","Data":"a50685615a2c30c65157239f5eba443bb2212b6c90867d00f36186f7ec29437e"} Nov 21 15:41:12 crc kubenswrapper[4774]: I1121 15:41:12.524854 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/octavia-api-58f57bccc4-t26gh" event={"ID":"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5","Type":"ContainerStarted","Data":"5120712407bc348e932ca0221fc67c19378ab124e0409f5c775cacf601a454ed"} Nov 21 15:41:12 crc kubenswrapper[4774]: I1121 15:41:12.525237 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-api-58f57bccc4-t26gh" event={"ID":"7d7b8c2e-d524-42cf-bb67-3fb652fa55a5","Type":"ContainerStarted","Data":"b2d3e4d7e21ce4632e272241852cc402c5709c00fb69caab321e45f920592b6a"} Nov 21 15:41:12 crc kubenswrapper[4774]: I1121 15:41:12.525615 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:12 crc kubenswrapper[4774]: I1121 15:41:12.527966 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:12 crc kubenswrapper[4774]: I1121 15:41:12.555205 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-api-58f57bccc4-t26gh" podStartSLOduration=3.577952494 podStartE2EDuration="12.555186661s" podCreationTimestamp="2025-11-21 15:41:00 +0000 UTC" firstStartedPulling="2025-11-21 15:41:01.246203974 +0000 UTC m=+5851.898403243" lastFinishedPulling="2025-11-21 15:41:10.223438151 +0000 UTC m=+5860.875637410" observedRunningTime="2025-11-21 15:41:12.547752629 +0000 UTC m=+5863.199951888" watchObservedRunningTime="2025-11-21 15:41:12.555186661 +0000 UTC m=+5863.207385920" Nov 21 15:41:13 crc kubenswrapper[4774]: I1121 15:41:13.197434 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-mkddd" podUID="7e9568d8-f5a4-4bd2-93a2-08df43d611e7" containerName="ovn-controller" probeResult="failure" output=< Nov 21 15:41:13 crc kubenswrapper[4774]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 21 15:41:13 crc kubenswrapper[4774]: > Nov 21 15:41:15 crc kubenswrapper[4774]: I1121 15:41:15.093991 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:41:15 crc kubenswrapper[4774]: E1121 15:41:15.094317 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.202211 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-mkddd" podUID="7e9568d8-f5a4-4bd2-93a2-08df43d611e7" containerName="ovn-controller" probeResult="failure" output=< Nov 21 15:41:18 crc kubenswrapper[4774]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Nov 21 15:41:18 crc kubenswrapper[4774]: > Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.244568 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.252667 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-svdd5" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.395801 4774 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/ovn-controller-mkddd-config-xlr4f"] Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.397043 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.398716 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.411516 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mkddd-config-xlr4f"] Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.519680 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5fb8\" (UniqueName: \"kubernetes.io/projected/27ec78d9-35fc-47bf-99ce-6339eca15b6f-kube-api-access-j5fb8\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.519781 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-run-ovn\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.519900 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-log-ovn\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.520097 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-run\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.520221 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/27ec78d9-35fc-47bf-99ce-6339eca15b6f-additional-scripts\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.520306 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27ec78d9-35fc-47bf-99ce-6339eca15b6f-scripts\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.623059 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-run\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.623214 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/27ec78d9-35fc-47bf-99ce-6339eca15b6f-additional-scripts\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.623334 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27ec78d9-35fc-47bf-99ce-6339eca15b6f-scripts\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.623406 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-run\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.623449 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5fb8\" (UniqueName: \"kubernetes.io/projected/27ec78d9-35fc-47bf-99ce-6339eca15b6f-kube-api-access-j5fb8\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.623610 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-run-ovn\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.623687 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-log-ovn\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.623961 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-log-ovn\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.624002 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-run-ovn\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.624222 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/27ec78d9-35fc-47bf-99ce-6339eca15b6f-additional-scripts\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.626257 4774 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27ec78d9-35fc-47bf-99ce-6339eca15b6f-scripts\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.653515 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5fb8\" (UniqueName: \"kubernetes.io/projected/27ec78d9-35fc-47bf-99ce-6339eca15b6f-kube-api-access-j5fb8\") pod \"ovn-controller-mkddd-config-xlr4f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.715638 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.928881 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-rsyslog-c48wg"] Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.930794 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.946948 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-config-data" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.947127 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-rsyslog-scripts" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.947423 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"octavia-hmport-map" Nov 21 15:41:18 crc kubenswrapper[4774]: I1121 15:41:18.953675 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-c48wg"] Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.040352 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8-hm-ports\") pod \"octavia-rsyslog-c48wg\" (UID: \"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8\") " pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.040414 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8-config-data\") pod \"octavia-rsyslog-c48wg\" (UID: \"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8\") " pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.040463 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8-config-data-merged\") pod \"octavia-rsyslog-c48wg\" (UID: \"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8\") " pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.040698 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8-scripts\") pod \"octavia-rsyslog-c48wg\" (UID: \"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8\") " pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.142674 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8-hm-ports\") pod \"octavia-rsyslog-c48wg\" (UID: \"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8\") " pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.142726 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8-config-data\") pod \"octavia-rsyslog-c48wg\" (UID: \"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8\") " pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.142765 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8-config-data-merged\") pod \"octavia-rsyslog-c48wg\" (UID: \"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8\") " pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.142848 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8-scripts\") pod \"octavia-rsyslog-c48wg\" (UID: \"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8\") " pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.143550 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8-config-data-merged\") pod \"octavia-rsyslog-c48wg\" (UID: \"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8\") " pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.144047 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8-hm-ports\") pod \"octavia-rsyslog-c48wg\" (UID: \"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8\") " pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.148862 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8-config-data\") pod \"octavia-rsyslog-c48wg\" (UID: \"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8\") " pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.148880 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8-scripts\") pod \"octavia-rsyslog-c48wg\" (UID: \"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8\") " pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.250855 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mkddd-config-xlr4f"] Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.251969 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.606799 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mkddd-config-xlr4f" event={"ID":"27ec78d9-35fc-47bf-99ce-6339eca15b6f","Type":"ContainerStarted","Data":"d90d8be32615624afbdf310e146a9bb1df1cf0803f7beaac1ace05ef736f1c7a"} Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.626092 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-5955f5554b-z7pj5"] Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.628333 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-5955f5554b-z7pj5" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.633266 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.651995 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-5955f5554b-z7pj5"] Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.756572 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/0de430ee-eb59-499c-917e-17c2ea137d1b-amphora-image\") pod \"octavia-image-upload-5955f5554b-z7pj5\" (UID: \"0de430ee-eb59-499c-917e-17c2ea137d1b\") " pod="openstack/octavia-image-upload-5955f5554b-z7pj5" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.757044 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0de430ee-eb59-499c-917e-17c2ea137d1b-httpd-config\") pod \"octavia-image-upload-5955f5554b-z7pj5\" (UID: \"0de430ee-eb59-499c-917e-17c2ea137d1b\") " pod="openstack/octavia-image-upload-5955f5554b-z7pj5" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.814465 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-rsyslog-c48wg"] Nov 21 15:41:19 crc kubenswrapper[4774]: W1121 15:41:19.817989 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podecccdf94_54d3_44a1_b56c_7f5d9ed64ac8.slice/crio-293b594a51415e88cfd875137c2717b9466d716a358b1f5224a55487790703ca WatchSource:0}: Error finding container 293b594a51415e88cfd875137c2717b9466d716a358b1f5224a55487790703ca: Status 404 returned error can't find the container with id 293b594a51415e88cfd875137c2717b9466d716a358b1f5224a55487790703ca Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.858675 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/0de430ee-eb59-499c-917e-17c2ea137d1b-amphora-image\") pod \"octavia-image-upload-5955f5554b-z7pj5\" (UID: \"0de430ee-eb59-499c-917e-17c2ea137d1b\") " pod="openstack/octavia-image-upload-5955f5554b-z7pj5" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.858766 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0de430ee-eb59-499c-917e-17c2ea137d1b-httpd-config\") pod \"octavia-image-upload-5955f5554b-z7pj5\" (UID: \"0de430ee-eb59-499c-917e-17c2ea137d1b\") " pod="openstack/octavia-image-upload-5955f5554b-z7pj5" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.860106 4774 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/0de430ee-eb59-499c-917e-17c2ea137d1b-amphora-image\") pod \"octavia-image-upload-5955f5554b-z7pj5\" (UID: \"0de430ee-eb59-499c-917e-17c2ea137d1b\") " pod="openstack/octavia-image-upload-5955f5554b-z7pj5" Nov 21 15:41:19 crc kubenswrapper[4774]: I1121 15:41:19.865006 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0de430ee-eb59-499c-917e-17c2ea137d1b-httpd-config\") pod \"octavia-image-upload-5955f5554b-z7pj5\" (UID: \"0de430ee-eb59-499c-917e-17c2ea137d1b\") " pod="openstack/octavia-image-upload-5955f5554b-z7pj5" Nov 21 15:41:20 crc kubenswrapper[4774]: I1121 15:41:20.002751 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-5955f5554b-z7pj5" Nov 21 15:41:20 crc kubenswrapper[4774]: I1121 15:41:20.465374 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-5955f5554b-z7pj5"] Nov 21 15:41:20 crc kubenswrapper[4774]: W1121 15:41:20.465810 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0de430ee_eb59_499c_917e_17c2ea137d1b.slice/crio-2bdf6b7cf999747c233bee6ed543b749040fabeb829395ef9b86a806a7e8cc33 WatchSource:0}: Error finding container 2bdf6b7cf999747c233bee6ed543b749040fabeb829395ef9b86a806a7e8cc33: Status 404 returned error can't find the container with id 2bdf6b7cf999747c233bee6ed543b749040fabeb829395ef9b86a806a7e8cc33 Nov 21 15:41:20 crc kubenswrapper[4774]: I1121 15:41:20.615682 4774 generic.go:334] "Generic (PLEG): container finished" podID="27ec78d9-35fc-47bf-99ce-6339eca15b6f" containerID="e721c766a96ccb94f7e98eb9fccb4ed527e375ba1766fdacf8c73602a9193555" exitCode=0 Nov 21 15:41:20 crc kubenswrapper[4774]: I1121 15:41:20.615750 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mkddd-config-xlr4f" event={"ID":"27ec78d9-35fc-47bf-99ce-6339eca15b6f","Type":"ContainerDied","Data":"e721c766a96ccb94f7e98eb9fccb4ed527e375ba1766fdacf8c73602a9193555"} Nov 21 15:41:20 crc kubenswrapper[4774]: I1121 15:41:20.617980 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-c48wg" event={"ID":"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8","Type":"ContainerStarted","Data":"293b594a51415e88cfd875137c2717b9466d716a358b1f5224a55487790703ca"} Nov 21 15:41:20 crc kubenswrapper[4774]: I1121 15:41:20.619302 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-5955f5554b-z7pj5" event={"ID":"0de430ee-eb59-499c-917e-17c2ea137d1b","Type":"ContainerStarted","Data":"2bdf6b7cf999747c233bee6ed543b749040fabeb829395ef9b86a806a7e8cc33"} Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.006119 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.104462 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-log-ovn\") pod \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.104508 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27ec78d9-35fc-47bf-99ce-6339eca15b6f-scripts\") pod \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.104560 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "27ec78d9-35fc-47bf-99ce-6339eca15b6f" (UID: "27ec78d9-35fc-47bf-99ce-6339eca15b6f"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.104694 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-run\") pod \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.104721 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-run-ovn\") pod \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.104750 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5fb8\" (UniqueName: \"kubernetes.io/projected/27ec78d9-35fc-47bf-99ce-6339eca15b6f-kube-api-access-j5fb8\") pod \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.104790 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-run" (OuterVolumeSpecName: "var-run") pod "27ec78d9-35fc-47bf-99ce-6339eca15b6f" (UID: "27ec78d9-35fc-47bf-99ce-6339eca15b6f"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.104809 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "27ec78d9-35fc-47bf-99ce-6339eca15b6f" (UID: "27ec78d9-35fc-47bf-99ce-6339eca15b6f"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.104808 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/27ec78d9-35fc-47bf-99ce-6339eca15b6f-additional-scripts\") pod \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\" (UID: \"27ec78d9-35fc-47bf-99ce-6339eca15b6f\") " Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.105279 4774 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-run\") on node \"crc\" DevicePath \"\"" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.105307 4774 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.105320 4774 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/27ec78d9-35fc-47bf-99ce-6339eca15b6f-var-log-ovn\") on node \"crc\" DevicePath \"\"" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.105690 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27ec78d9-35fc-47bf-99ce-6339eca15b6f-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "27ec78d9-35fc-47bf-99ce-6339eca15b6f" (UID: "27ec78d9-35fc-47bf-99ce-6339eca15b6f"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.105853 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27ec78d9-35fc-47bf-99ce-6339eca15b6f-scripts" (OuterVolumeSpecName: "scripts") pod "27ec78d9-35fc-47bf-99ce-6339eca15b6f" (UID: "27ec78d9-35fc-47bf-99ce-6339eca15b6f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.110044 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27ec78d9-35fc-47bf-99ce-6339eca15b6f-kube-api-access-j5fb8" (OuterVolumeSpecName: "kube-api-access-j5fb8") pod "27ec78d9-35fc-47bf-99ce-6339eca15b6f" (UID: "27ec78d9-35fc-47bf-99ce-6339eca15b6f"). InnerVolumeSpecName "kube-api-access-j5fb8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.208148 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5fb8\" (UniqueName: \"kubernetes.io/projected/27ec78d9-35fc-47bf-99ce-6339eca15b6f-kube-api-access-j5fb8\") on node \"crc\" DevicePath \"\"" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.208199 4774 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/27ec78d9-35fc-47bf-99ce-6339eca15b6f-additional-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.208218 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27ec78d9-35fc-47bf-99ce-6339eca15b6f-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.640537 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mkddd-config-xlr4f" event={"ID":"27ec78d9-35fc-47bf-99ce-6339eca15b6f","Type":"ContainerDied","Data":"d90d8be32615624afbdf310e146a9bb1df1cf0803f7beaac1ace05ef736f1c7a"} Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.640916 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d90d8be32615624afbdf310e146a9bb1df1cf0803f7beaac1ace05ef736f1c7a" Nov 21 15:41:22 crc kubenswrapper[4774]: I1121 15:41:22.640627 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mkddd-config-xlr4f" Nov 21 15:41:23 crc kubenswrapper[4774]: I1121 15:41:23.102588 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-mkddd-config-xlr4f"] Nov 21 15:41:23 crc kubenswrapper[4774]: I1121 15:41:23.111008 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-mkddd-config-xlr4f"] Nov 21 15:41:23 crc kubenswrapper[4774]: I1121 15:41:23.200468 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-mkddd" Nov 21 15:41:24 crc kubenswrapper[4774]: I1121 15:41:24.103332 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27ec78d9-35fc-47bf-99ce-6339eca15b6f" path="/var/lib/kubelet/pods/27ec78d9-35fc-47bf-99ce-6339eca15b6f/volumes" Nov 21 15:41:26 crc kubenswrapper[4774]: I1121 15:41:26.693113 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-c48wg" event={"ID":"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8","Type":"ContainerStarted","Data":"744ed339dc1a405334734f846abed7f1185228bcdd7a76184e6c43f9b8a08326"} Nov 21 15:41:26 crc kubenswrapper[4774]: I1121 15:41:26.841844 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-db-sync-79fml"] Nov 21 15:41:26 crc kubenswrapper[4774]: E1121 15:41:26.842284 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ec78d9-35fc-47bf-99ce-6339eca15b6f" containerName="ovn-config" Nov 21 15:41:26 crc kubenswrapper[4774]: I1121 15:41:26.842297 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ec78d9-35fc-47bf-99ce-6339eca15b6f" containerName="ovn-config" Nov 21 15:41:26 crc kubenswrapper[4774]: I1121 15:41:26.842518 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="27ec78d9-35fc-47bf-99ce-6339eca15b6f" containerName="ovn-config" Nov 21 15:41:26 crc kubenswrapper[4774]: I1121 15:41:26.843613 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:26 crc kubenswrapper[4774]: I1121 15:41:26.850707 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-scripts" Nov 21 15:41:26 crc kubenswrapper[4774]: I1121 15:41:26.868622 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-79fml"] Nov 21 15:41:26 crc kubenswrapper[4774]: I1121 15:41:26.939523 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-config-data\") pod \"octavia-db-sync-79fml\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:26 crc kubenswrapper[4774]: I1121 15:41:26.939614 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-combined-ca-bundle\") pod \"octavia-db-sync-79fml\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:26 crc kubenswrapper[4774]: I1121 15:41:26.939785 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-scripts\") pod \"octavia-db-sync-79fml\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:26 crc kubenswrapper[4774]: I1121 15:41:26.939846 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/54619bd5-89f7-432a-8ad7-38574bfe4a1e-config-data-merged\") pod \"octavia-db-sync-79fml\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:27 crc kubenswrapper[4774]: I1121 15:41:27.041364 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-scripts\") pod \"octavia-db-sync-79fml\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:27 crc kubenswrapper[4774]: I1121 15:41:27.041776 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/54619bd5-89f7-432a-8ad7-38574bfe4a1e-config-data-merged\") pod \"octavia-db-sync-79fml\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:27 crc kubenswrapper[4774]: I1121 15:41:27.041903 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-config-data\") pod \"octavia-db-sync-79fml\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:27 crc kubenswrapper[4774]: I1121 15:41:27.041949 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-combined-ca-bundle\") pod \"octavia-db-sync-79fml\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:27 crc kubenswrapper[4774]: I1121 15:41:27.042287 4774 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/54619bd5-89f7-432a-8ad7-38574bfe4a1e-config-data-merged\") pod \"octavia-db-sync-79fml\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:27 crc kubenswrapper[4774]: I1121 15:41:27.048113 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-config-data\") pod \"octavia-db-sync-79fml\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:27 crc kubenswrapper[4774]: I1121 15:41:27.059962 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-scripts\") pod \"octavia-db-sync-79fml\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:27 crc kubenswrapper[4774]: I1121 15:41:27.062526 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-combined-ca-bundle\") pod \"octavia-db-sync-79fml\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:27 crc kubenswrapper[4774]: I1121 15:41:27.176604 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:27 crc kubenswrapper[4774]: I1121 15:41:27.895809 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-db-sync-79fml"] Nov 21 15:41:28 crc kubenswrapper[4774]: I1121 15:41:28.722560 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-79fml" event={"ID":"54619bd5-89f7-432a-8ad7-38574bfe4a1e","Type":"ContainerStarted","Data":"e6ccb283ce0bea692497d58ef4daece6336c54f35f70e7c256e48a843da117cf"} Nov 21 15:41:28 crc kubenswrapper[4774]: I1121 15:41:28.723326 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-79fml" event={"ID":"54619bd5-89f7-432a-8ad7-38574bfe4a1e","Type":"ContainerStarted","Data":"22610d9da0b3baed9bff982f760c77beba9b88463276d3fe5f330b99e2f60589"} Nov 21 15:41:29 crc kubenswrapper[4774]: I1121 15:41:29.736391 4774 generic.go:334] "Generic (PLEG): container finished" podID="ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8" containerID="744ed339dc1a405334734f846abed7f1185228bcdd7a76184e6c43f9b8a08326" exitCode=0 Nov 21 15:41:29 crc kubenswrapper[4774]: I1121 15:41:29.736474 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-c48wg" event={"ID":"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8","Type":"ContainerDied","Data":"744ed339dc1a405334734f846abed7f1185228bcdd7a76184e6c43f9b8a08326"} Nov 21 15:41:29 crc kubenswrapper[4774]: I1121 15:41:29.738780 4774 generic.go:334] "Generic (PLEG): container finished" podID="54619bd5-89f7-432a-8ad7-38574bfe4a1e" containerID="e6ccb283ce0bea692497d58ef4daece6336c54f35f70e7c256e48a843da117cf" exitCode=0 Nov 21 15:41:29 crc kubenswrapper[4774]: I1121 15:41:29.738807 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-79fml" event={"ID":"54619bd5-89f7-432a-8ad7-38574bfe4a1e","Type":"ContainerDied","Data":"e6ccb283ce0bea692497d58ef4daece6336c54f35f70e7c256e48a843da117cf"} Nov 21 15:41:30 crc kubenswrapper[4774]: I1121 15:41:30.100394 4774 scope.go:117] "RemoveContainer" 
containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:41:30 crc kubenswrapper[4774]: E1121 15:41:30.101402 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:41:31 crc kubenswrapper[4774]: I1121 15:41:31.761167 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-79fml" event={"ID":"54619bd5-89f7-432a-8ad7-38574bfe4a1e","Type":"ContainerStarted","Data":"a8ab46e05a1400da72c6dbb0573223d045f8896d56efd66c53c8b57fda3a54ed"} Nov 21 15:41:31 crc kubenswrapper[4774]: I1121 15:41:31.780158 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-db-sync-79fml" podStartSLOduration=5.780139125 podStartE2EDuration="5.780139125s" podCreationTimestamp="2025-11-21 15:41:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:41:31.778723605 +0000 UTC m=+5882.430922864" watchObservedRunningTime="2025-11-21 15:41:31.780139125 +0000 UTC m=+5882.432338384" Nov 21 15:41:39 crc kubenswrapper[4774]: I1121 15:41:39.168016 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:39 crc kubenswrapper[4774]: I1121 15:41:39.173108 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-api-58f57bccc4-t26gh" Nov 21 15:41:40 crc kubenswrapper[4774]: I1121 15:41:40.850810 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-rsyslog-c48wg" event={"ID":"ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8","Type":"ContainerStarted","Data":"0329365eb53be3163b1485a8d490fb3186e6b1c25b425f8559fe5a689ef4f999"} Nov 21 15:41:40 crc kubenswrapper[4774]: I1121 15:41:40.851440 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:40 crc kubenswrapper[4774]: I1121 15:41:40.853740 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-5955f5554b-z7pj5" event={"ID":"0de430ee-eb59-499c-917e-17c2ea137d1b","Type":"ContainerStarted","Data":"1682bd0b8689c00739ce990dfd74c1a8ff730e2d65e2b477f3541a6723d15069"} Nov 21 15:41:40 crc kubenswrapper[4774]: I1121 15:41:40.868805 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-rsyslog-c48wg" podStartSLOduration=2.639558958 podStartE2EDuration="22.868776059s" podCreationTimestamp="2025-11-21 15:41:18 +0000 UTC" firstStartedPulling="2025-11-21 15:41:19.81979471 +0000 UTC m=+5870.471993969" lastFinishedPulling="2025-11-21 15:41:40.049011811 +0000 UTC m=+5890.701211070" observedRunningTime="2025-11-21 15:41:40.866568317 +0000 UTC m=+5891.518767586" watchObservedRunningTime="2025-11-21 15:41:40.868776059 +0000 UTC m=+5891.520975358" Nov 21 15:41:41 crc kubenswrapper[4774]: I1121 15:41:41.094268 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:41:41 crc kubenswrapper[4774]: E1121 15:41:41.094575 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:41:41 crc kubenswrapper[4774]: I1121 15:41:41.544040 4774 scope.go:117] "RemoveContainer" containerID="fc6400a52eb124a4cc62c6db857453863736ba23593e5f02c9170b77bbe5961e" Nov 21 15:41:41 crc kubenswrapper[4774]: I1121 15:41:41.581901 4774 scope.go:117] "RemoveContainer" containerID="602325a616e3e9bcecd3ccac45856c01f3b925d86144ce97074f56387316f86b" Nov 21 15:41:41 crc kubenswrapper[4774]: I1121 15:41:41.648025 4774 scope.go:117] "RemoveContainer" containerID="966a5461e4d49c39bc80232d65ecf48a60b6552d861302cfce95a774c52b0454" Nov 21 15:41:43 crc kubenswrapper[4774]: I1121 15:41:43.884141 4774 generic.go:334] "Generic (PLEG): container finished" podID="54619bd5-89f7-432a-8ad7-38574bfe4a1e" containerID="a8ab46e05a1400da72c6dbb0573223d045f8896d56efd66c53c8b57fda3a54ed" exitCode=0 Nov 21 15:41:43 crc kubenswrapper[4774]: I1121 15:41:43.884220 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-79fml" event={"ID":"54619bd5-89f7-432a-8ad7-38574bfe4a1e","Type":"ContainerDied","Data":"a8ab46e05a1400da72c6dbb0573223d045f8896d56efd66c53c8b57fda3a54ed"} Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.461163 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.519024 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-scripts\") pod \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.519102 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-combined-ca-bundle\") pod \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.519162 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/54619bd5-89f7-432a-8ad7-38574bfe4a1e-config-data-merged\") pod \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.519278 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-config-data\") pod \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\" (UID: \"54619bd5-89f7-432a-8ad7-38574bfe4a1e\") " Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.524929 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-scripts" (OuterVolumeSpecName: "scripts") pod "54619bd5-89f7-432a-8ad7-38574bfe4a1e" (UID: "54619bd5-89f7-432a-8ad7-38574bfe4a1e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.530520 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-config-data" (OuterVolumeSpecName: "config-data") pod "54619bd5-89f7-432a-8ad7-38574bfe4a1e" (UID: "54619bd5-89f7-432a-8ad7-38574bfe4a1e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.549016 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54619bd5-89f7-432a-8ad7-38574bfe4a1e" (UID: "54619bd5-89f7-432a-8ad7-38574bfe4a1e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.551744 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54619bd5-89f7-432a-8ad7-38574bfe4a1e-config-data-merged" (OuterVolumeSpecName: "config-data-merged") pod "54619bd5-89f7-432a-8ad7-38574bfe4a1e" (UID: "54619bd5-89f7-432a-8ad7-38574bfe4a1e"). InnerVolumeSpecName "config-data-merged". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.622646 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.622702 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.622716 4774 reconciler_common.go:293] "Volume detached for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/54619bd5-89f7-432a-8ad7-38574bfe4a1e-config-data-merged\") on node \"crc\" DevicePath \"\"" Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.622727 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54619bd5-89f7-432a-8ad7-38574bfe4a1e-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.906788 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-db-sync-79fml" event={"ID":"54619bd5-89f7-432a-8ad7-38574bfe4a1e","Type":"ContainerDied","Data":"22610d9da0b3baed9bff982f760c77beba9b88463276d3fe5f330b99e2f60589"} Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.906852 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22610d9da0b3baed9bff982f760c77beba9b88463276d3fe5f330b99e2f60589" Nov 21 15:41:45 crc kubenswrapper[4774]: I1121 15:41:45.906955 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-db-sync-79fml" Nov 21 15:41:47 crc kubenswrapper[4774]: I1121 15:41:47.927603 4774 generic.go:334] "Generic (PLEG): container finished" podID="0de430ee-eb59-499c-917e-17c2ea137d1b" containerID="1682bd0b8689c00739ce990dfd74c1a8ff730e2d65e2b477f3541a6723d15069" exitCode=0 Nov 21 15:41:47 crc kubenswrapper[4774]: I1121 15:41:47.927855 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-5955f5554b-z7pj5" event={"ID":"0de430ee-eb59-499c-917e-17c2ea137d1b","Type":"ContainerDied","Data":"1682bd0b8689c00739ce990dfd74c1a8ff730e2d65e2b477f3541a6723d15069"} Nov 21 15:41:48 crc kubenswrapper[4774]: I1121 15:41:48.972014 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-5955f5554b-z7pj5" event={"ID":"0de430ee-eb59-499c-917e-17c2ea137d1b","Type":"ContainerStarted","Data":"e3726a6f3973548e690b60cb28d51cbe8167f16fbc0b26ca8a880b029559ef59"} Nov 21 15:41:49 crc kubenswrapper[4774]: I1121 15:41:49.299221 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-rsyslog-c48wg" Nov 21 15:41:49 crc kubenswrapper[4774]: I1121 15:41:49.323593 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-5955f5554b-z7pj5" podStartSLOduration=10.746702029 podStartE2EDuration="30.323570854s" podCreationTimestamp="2025-11-21 15:41:19 +0000 UTC" firstStartedPulling="2025-11-21 15:41:20.467534804 +0000 UTC m=+5871.119734063" lastFinishedPulling="2025-11-21 15:41:40.044403619 +0000 UTC m=+5890.696602888" observedRunningTime="2025-11-21 15:41:48.993709491 +0000 UTC m=+5899.645908750" watchObservedRunningTime="2025-11-21 15:41:49.323570854 +0000 UTC m=+5899.975770113" Nov 21 15:41:54 crc kubenswrapper[4774]: I1121 15:41:54.093550 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:41:54 crc kubenswrapper[4774]: E1121 15:41:54.094495 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:42:08 crc kubenswrapper[4774]: I1121 15:42:08.097862 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:42:08 crc kubenswrapper[4774]: E1121 15:42:08.099194 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:42:12 crc kubenswrapper[4774]: I1121 15:42:12.808559 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-5955f5554b-z7pj5"] Nov 21 15:42:12 crc kubenswrapper[4774]: I1121 15:42:12.810446 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/octavia-image-upload-5955f5554b-z7pj5" podUID="0de430ee-eb59-499c-917e-17c2ea137d1b" 
containerName="octavia-amphora-httpd" containerID="cri-o://e3726a6f3973548e690b60cb28d51cbe8167f16fbc0b26ca8a880b029559ef59" gracePeriod=30 Nov 21 15:42:13 crc kubenswrapper[4774]: I1121 15:42:13.242385 4774 generic.go:334] "Generic (PLEG): container finished" podID="0de430ee-eb59-499c-917e-17c2ea137d1b" containerID="e3726a6f3973548e690b60cb28d51cbe8167f16fbc0b26ca8a880b029559ef59" exitCode=0 Nov 21 15:42:13 crc kubenswrapper[4774]: I1121 15:42:13.242490 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-5955f5554b-z7pj5" event={"ID":"0de430ee-eb59-499c-917e-17c2ea137d1b","Type":"ContainerDied","Data":"e3726a6f3973548e690b60cb28d51cbe8167f16fbc0b26ca8a880b029559ef59"} Nov 21 15:42:13 crc kubenswrapper[4774]: I1121 15:42:13.455903 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-5955f5554b-z7pj5" Nov 21 15:42:13 crc kubenswrapper[4774]: I1121 15:42:13.508663 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0de430ee-eb59-499c-917e-17c2ea137d1b-httpd-config\") pod \"0de430ee-eb59-499c-917e-17c2ea137d1b\" (UID: \"0de430ee-eb59-499c-917e-17c2ea137d1b\") " Nov 21 15:42:13 crc kubenswrapper[4774]: I1121 15:42:13.508771 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/0de430ee-eb59-499c-917e-17c2ea137d1b-amphora-image\") pod \"0de430ee-eb59-499c-917e-17c2ea137d1b\" (UID: \"0de430ee-eb59-499c-917e-17c2ea137d1b\") " Nov 21 15:42:13 crc kubenswrapper[4774]: I1121 15:42:13.540682 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0de430ee-eb59-499c-917e-17c2ea137d1b-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "0de430ee-eb59-499c-917e-17c2ea137d1b" (UID: "0de430ee-eb59-499c-917e-17c2ea137d1b"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:42:13 crc kubenswrapper[4774]: I1121 15:42:13.570424 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0de430ee-eb59-499c-917e-17c2ea137d1b-amphora-image" (OuterVolumeSpecName: "amphora-image") pod "0de430ee-eb59-499c-917e-17c2ea137d1b" (UID: "0de430ee-eb59-499c-917e-17c2ea137d1b"). InnerVolumeSpecName "amphora-image". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:42:13 crc kubenswrapper[4774]: I1121 15:42:13.611867 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/0de430ee-eb59-499c-917e-17c2ea137d1b-httpd-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:42:13 crc kubenswrapper[4774]: I1121 15:42:13.611906 4774 reconciler_common.go:293] "Volume detached for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/0de430ee-eb59-499c-917e-17c2ea137d1b-amphora-image\") on node \"crc\" DevicePath \"\"" Nov 21 15:42:14 crc kubenswrapper[4774]: I1121 15:42:14.254312 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-5955f5554b-z7pj5" event={"ID":"0de430ee-eb59-499c-917e-17c2ea137d1b","Type":"ContainerDied","Data":"2bdf6b7cf999747c233bee6ed543b749040fabeb829395ef9b86a806a7e8cc33"} Nov 21 15:42:14 crc kubenswrapper[4774]: I1121 15:42:14.254375 4774 scope.go:117] "RemoveContainer" containerID="e3726a6f3973548e690b60cb28d51cbe8167f16fbc0b26ca8a880b029559ef59" Nov 21 15:42:14 crc kubenswrapper[4774]: I1121 15:42:14.254534 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-5955f5554b-z7pj5" Nov 21 15:42:14 crc kubenswrapper[4774]: I1121 15:42:14.277483 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-image-upload-5955f5554b-z7pj5"] Nov 21 15:42:14 crc kubenswrapper[4774]: I1121 15:42:14.281292 4774 scope.go:117] "RemoveContainer" containerID="1682bd0b8689c00739ce990dfd74c1a8ff730e2d65e2b477f3541a6723d15069" Nov 21 15:42:14 crc kubenswrapper[4774]: I1121 15:42:14.286974 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-image-upload-5955f5554b-z7pj5"] Nov 21 15:42:16 crc kubenswrapper[4774]: I1121 15:42:16.113296 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0de430ee-eb59-499c-917e-17c2ea137d1b" path="/var/lib/kubelet/pods/0de430ee-eb59-499c-917e-17c2ea137d1b/volumes" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.019865 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-image-upload-5955f5554b-z7sj5"] Nov 21 15:42:18 crc kubenswrapper[4774]: E1121 15:42:18.020548 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54619bd5-89f7-432a-8ad7-38574bfe4a1e" containerName="init" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.020561 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="54619bd5-89f7-432a-8ad7-38574bfe4a1e" containerName="init" Nov 21 15:42:18 crc kubenswrapper[4774]: E1121 15:42:18.020582 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0de430ee-eb59-499c-917e-17c2ea137d1b" containerName="init" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.020587 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0de430ee-eb59-499c-917e-17c2ea137d1b" containerName="init" Nov 21 15:42:18 crc kubenswrapper[4774]: E1121 15:42:18.020600 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54619bd5-89f7-432a-8ad7-38574bfe4a1e" containerName="octavia-db-sync" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.020606 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="54619bd5-89f7-432a-8ad7-38574bfe4a1e" containerName="octavia-db-sync" Nov 21 15:42:18 crc kubenswrapper[4774]: E1121 15:42:18.020619 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0de430ee-eb59-499c-917e-17c2ea137d1b" 
containerName="octavia-amphora-httpd" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.020625 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0de430ee-eb59-499c-917e-17c2ea137d1b" containerName="octavia-amphora-httpd" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.020881 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="54619bd5-89f7-432a-8ad7-38574bfe4a1e" containerName="octavia-db-sync" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.020908 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="0de430ee-eb59-499c-917e-17c2ea137d1b" containerName="octavia-amphora-httpd" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.021958 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-5955f5554b-z7sj5" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.025475 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-config-data" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.036452 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-5955f5554b-z7sj5"] Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.103715 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2c727b3-da08-42a5-8b9c-364f73fbfc0e-httpd-config\") pod \"octavia-image-upload-5955f5554b-z7sj5\" (UID: \"a2c727b3-da08-42a5-8b9c-364f73fbfc0e\") " pod="openstack/octavia-image-upload-5955f5554b-z7sj5" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.103802 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a2c727b3-da08-42a5-8b9c-364f73fbfc0e-amphora-image\") pod \"octavia-image-upload-5955f5554b-z7sj5\" (UID: \"a2c727b3-da08-42a5-8b9c-364f73fbfc0e\") " pod="openstack/octavia-image-upload-5955f5554b-z7sj5" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.205367 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2c727b3-da08-42a5-8b9c-364f73fbfc0e-httpd-config\") pod \"octavia-image-upload-5955f5554b-z7sj5\" (UID: \"a2c727b3-da08-42a5-8b9c-364f73fbfc0e\") " pod="openstack/octavia-image-upload-5955f5554b-z7sj5" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.205490 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a2c727b3-da08-42a5-8b9c-364f73fbfc0e-amphora-image\") pod \"octavia-image-upload-5955f5554b-z7sj5\" (UID: \"a2c727b3-da08-42a5-8b9c-364f73fbfc0e\") " pod="openstack/octavia-image-upload-5955f5554b-z7sj5" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.206405 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-image\" (UniqueName: \"kubernetes.io/empty-dir/a2c727b3-da08-42a5-8b9c-364f73fbfc0e-amphora-image\") pod \"octavia-image-upload-5955f5554b-z7sj5\" (UID: \"a2c727b3-da08-42a5-8b9c-364f73fbfc0e\") " pod="openstack/octavia-image-upload-5955f5554b-z7sj5" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.215055 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2c727b3-da08-42a5-8b9c-364f73fbfc0e-httpd-config\") pod \"octavia-image-upload-5955f5554b-z7sj5\" (UID: 
\"a2c727b3-da08-42a5-8b9c-364f73fbfc0e\") " pod="openstack/octavia-image-upload-5955f5554b-z7sj5" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.350691 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-image-upload-5955f5554b-z7sj5" Nov 21 15:42:18 crc kubenswrapper[4774]: I1121 15:42:18.798721 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-image-upload-5955f5554b-z7sj5"] Nov 21 15:42:18 crc kubenswrapper[4774]: W1121 15:42:18.805578 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2c727b3_da08_42a5_8b9c_364f73fbfc0e.slice/crio-fade39d4ab825f06e0963627615f574f2a96422d858fced91354a407d2288aa2 WatchSource:0}: Error finding container fade39d4ab825f06e0963627615f574f2a96422d858fced91354a407d2288aa2: Status 404 returned error can't find the container with id fade39d4ab825f06e0963627615f574f2a96422d858fced91354a407d2288aa2 Nov 21 15:42:19 crc kubenswrapper[4774]: I1121 15:42:19.312196 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-5955f5554b-z7sj5" event={"ID":"a2c727b3-da08-42a5-8b9c-364f73fbfc0e","Type":"ContainerStarted","Data":"fade39d4ab825f06e0963627615f574f2a96422d858fced91354a407d2288aa2"} Nov 21 15:42:20 crc kubenswrapper[4774]: I1121 15:42:20.324645 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-5955f5554b-z7sj5" event={"ID":"a2c727b3-da08-42a5-8b9c-364f73fbfc0e","Type":"ContainerStarted","Data":"c9d74d0c2d677b2c5f4b86790a64665435bd292d64db948cfbf386ccf80b5d56"} Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.093242 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:42:23 crc kubenswrapper[4774]: E1121 15:42:23.093718 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.361148 4774 generic.go:334] "Generic (PLEG): container finished" podID="a2c727b3-da08-42a5-8b9c-364f73fbfc0e" containerID="c9d74d0c2d677b2c5f4b86790a64665435bd292d64db948cfbf386ccf80b5d56" exitCode=0 Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.361249 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-5955f5554b-z7sj5" event={"ID":"a2c727b3-da08-42a5-8b9c-364f73fbfc0e","Type":"ContainerDied","Data":"c9d74d0c2d677b2c5f4b86790a64665435bd292d64db948cfbf386ccf80b5d56"} Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.503332 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-healthmanager-qhw6s"] Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.505485 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.509318 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-scripts" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.509962 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-healthmanager-config-data" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.510316 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-certs-secret" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.517912 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-qhw6s"] Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.613650 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/f680c438-04e4-45a2-9996-f6668b99065d-hm-ports\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.613698 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f680c438-04e4-45a2-9996-f6668b99065d-scripts\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.613753 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f680c438-04e4-45a2-9996-f6668b99065d-config-data\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.613771 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/f680c438-04e4-45a2-9996-f6668b99065d-amphora-certs\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.613834 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f680c438-04e4-45a2-9996-f6668b99065d-combined-ca-bundle\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.613906 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/f680c438-04e4-45a2-9996-f6668b99065d-config-data-merged\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.715484 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f680c438-04e4-45a2-9996-f6668b99065d-combined-ca-bundle\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 
15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.715601 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/f680c438-04e4-45a2-9996-f6668b99065d-config-data-merged\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.715651 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/f680c438-04e4-45a2-9996-f6668b99065d-hm-ports\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.715676 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f680c438-04e4-45a2-9996-f6668b99065d-scripts\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.715726 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f680c438-04e4-45a2-9996-f6668b99065d-config-data\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.715744 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/f680c438-04e4-45a2-9996-f6668b99065d-amphora-certs\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.717158 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/f680c438-04e4-45a2-9996-f6668b99065d-config-data-merged\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.717516 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/f680c438-04e4-45a2-9996-f6668b99065d-hm-ports\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.719993 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f680c438-04e4-45a2-9996-f6668b99065d-scripts\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.720595 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/f680c438-04e4-45a2-9996-f6668b99065d-amphora-certs\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.728201 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/f680c438-04e4-45a2-9996-f6668b99065d-config-data\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.735808 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f680c438-04e4-45a2-9996-f6668b99065d-combined-ca-bundle\") pod \"octavia-healthmanager-qhw6s\" (UID: \"f680c438-04e4-45a2-9996-f6668b99065d\") " pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:23 crc kubenswrapper[4774]: I1121 15:42:23.828673 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:24 crc kubenswrapper[4774]: I1121 15:42:24.357776 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-healthmanager-qhw6s"] Nov 21 15:42:24 crc kubenswrapper[4774]: W1121 15:42:24.357943 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf680c438_04e4_45a2_9996_f6668b99065d.slice/crio-068f6ed2ea8f18b8a34ec18c9727ab6bb0db1e22b185dc63bce19ed57350b8ad WatchSource:0}: Error finding container 068f6ed2ea8f18b8a34ec18c9727ab6bb0db1e22b185dc63bce19ed57350b8ad: Status 404 returned error can't find the container with id 068f6ed2ea8f18b8a34ec18c9727ab6bb0db1e22b185dc63bce19ed57350b8ad Nov 21 15:42:24 crc kubenswrapper[4774]: I1121 15:42:24.384396 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-qhw6s" event={"ID":"f680c438-04e4-45a2-9996-f6668b99065d","Type":"ContainerStarted","Data":"068f6ed2ea8f18b8a34ec18c9727ab6bb0db1e22b185dc63bce19ed57350b8ad"} Nov 21 15:42:24 crc kubenswrapper[4774]: I1121 15:42:24.386594 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-image-upload-5955f5554b-z7sj5" event={"ID":"a2c727b3-da08-42a5-8b9c-364f73fbfc0e","Type":"ContainerStarted","Data":"c90a631908395c8c8842539862b2fbb21d03b05d2d79980a8c65902a9ee13baa"} Nov 21 15:42:24 crc kubenswrapper[4774]: I1121 15:42:24.423114 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-image-upload-5955f5554b-z7sj5" podStartSLOduration=7.018333364 podStartE2EDuration="7.423066432s" podCreationTimestamp="2025-11-21 15:42:17 +0000 UTC" firstStartedPulling="2025-11-21 15:42:18.80914092 +0000 UTC m=+5929.461340179" lastFinishedPulling="2025-11-21 15:42:19.213873988 +0000 UTC m=+5929.866073247" observedRunningTime="2025-11-21 15:42:24.405579704 +0000 UTC m=+5935.057778973" watchObservedRunningTime="2025-11-21 15:42:24.423066432 +0000 UTC m=+5935.075265701" Nov 21 15:42:24 crc kubenswrapper[4774]: I1121 15:42:24.920405 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-housekeeping-kppw9"] Nov 21 15:42:24 crc kubenswrapper[4774]: I1121 15:42:24.922548 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:24 crc kubenswrapper[4774]: I1121 15:42:24.925382 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-config-data" Nov 21 15:42:24 crc kubenswrapper[4774]: I1121 15:42:24.925835 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-housekeeping-scripts" Nov 21 15:42:24 crc kubenswrapper[4774]: I1121 15:42:24.933592 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-kppw9"] Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.041180 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3f74551-4d82-4d29-adc1-9116631e39c0-config-data\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.041553 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3f74551-4d82-4d29-adc1-9116631e39c0-scripts\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.041701 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/e3f74551-4d82-4d29-adc1-9116631e39c0-amphora-certs\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.041745 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/e3f74551-4d82-4d29-adc1-9116631e39c0-hm-ports\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.041772 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3f74551-4d82-4d29-adc1-9116631e39c0-combined-ca-bundle\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.042037 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/e3f74551-4d82-4d29-adc1-9116631e39c0-config-data-merged\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.144338 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3f74551-4d82-4d29-adc1-9116631e39c0-scripts\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.144429 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: 
\"kubernetes.io/secret/e3f74551-4d82-4d29-adc1-9116631e39c0-amphora-certs\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.144461 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/e3f74551-4d82-4d29-adc1-9116631e39c0-hm-ports\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.144485 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3f74551-4d82-4d29-adc1-9116631e39c0-combined-ca-bundle\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.144681 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/e3f74551-4d82-4d29-adc1-9116631e39c0-config-data-merged\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.145548 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/e3f74551-4d82-4d29-adc1-9116631e39c0-config-data-merged\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.145885 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/e3f74551-4d82-4d29-adc1-9116631e39c0-hm-ports\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.146061 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3f74551-4d82-4d29-adc1-9116631e39c0-config-data\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.151642 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/e3f74551-4d82-4d29-adc1-9116631e39c0-amphora-certs\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.151872 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3f74551-4d82-4d29-adc1-9116631e39c0-config-data\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.152443 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3f74551-4d82-4d29-adc1-9116631e39c0-scripts\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " 
pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.167262 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3f74551-4d82-4d29-adc1-9116631e39c0-combined-ca-bundle\") pod \"octavia-housekeeping-kppw9\" (UID: \"e3f74551-4d82-4d29-adc1-9116631e39c0\") " pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.248225 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.397542 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-qhw6s" event={"ID":"f680c438-04e4-45a2-9996-f6668b99065d","Type":"ContainerStarted","Data":"20d558a731002c77662906b604a64b55162745dc0934862d8be5a8cbd3988d77"} Nov 21 15:42:25 crc kubenswrapper[4774]: I1121 15:42:25.774588 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-housekeeping-kppw9"] Nov 21 15:42:25 crc kubenswrapper[4774]: W1121 15:42:25.785932 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3f74551_4d82_4d29_adc1_9116631e39c0.slice/crio-3d1197b73e80eb8a2a0faa5707dac44eae5275c4b95ceda1040cc095b1b9798a WatchSource:0}: Error finding container 3d1197b73e80eb8a2a0faa5707dac44eae5275c4b95ceda1040cc095b1b9798a: Status 404 returned error can't find the container with id 3d1197b73e80eb8a2a0faa5707dac44eae5275c4b95ceda1040cc095b1b9798a Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.062739 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/octavia-worker-dwldc"] Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.064539 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.074285 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-dwldc"] Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.097036 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-config-data" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.097288 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"octavia-worker-scripts" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.167897 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-hm-ports\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.169487 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-scripts\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.169520 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-amphora-certs\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.169984 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-config-data\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.170126 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-combined-ca-bundle\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.170716 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-config-data-merged\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.273395 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-hm-ports\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.273635 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-scripts\") pod \"octavia-worker-dwldc\" (UID: 
\"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.273678 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-amphora-certs\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.273721 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-config-data\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.274520 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-combined-ca-bundle\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.274740 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-config-data-merged\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.275223 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-merged\" (UniqueName: \"kubernetes.io/empty-dir/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-config-data-merged\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.275930 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hm-ports\" (UniqueName: \"kubernetes.io/configmap/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-hm-ports\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.279681 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"amphora-certs\" (UniqueName: \"kubernetes.io/secret/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-amphora-certs\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.280243 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-combined-ca-bundle\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.280681 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-config-data\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.280687 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/fb9639ba-9284-43d2-8a1d-0354ae6d4d11-scripts\") pod \"octavia-worker-dwldc\" (UID: \"fb9639ba-9284-43d2-8a1d-0354ae6d4d11\") " pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.407916 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-kppw9" event={"ID":"e3f74551-4d82-4d29-adc1-9116631e39c0","Type":"ContainerStarted","Data":"3d1197b73e80eb8a2a0faa5707dac44eae5275c4b95ceda1040cc095b1b9798a"} Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.430014 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/octavia-worker-dwldc" Nov 21 15:42:26 crc kubenswrapper[4774]: I1121 15:42:26.997646 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/octavia-worker-dwldc"] Nov 21 15:42:27 crc kubenswrapper[4774]: I1121 15:42:27.422992 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-dwldc" event={"ID":"fb9639ba-9284-43d2-8a1d-0354ae6d4d11","Type":"ContainerStarted","Data":"7549c20867376f8782a49eedd2c838bdb1c43f0a4452a43adf74fc6e0e88dc92"} Nov 21 15:42:30 crc kubenswrapper[4774]: I1121 15:42:30.450941 4774 generic.go:334] "Generic (PLEG): container finished" podID="f680c438-04e4-45a2-9996-f6668b99065d" containerID="20d558a731002c77662906b604a64b55162745dc0934862d8be5a8cbd3988d77" exitCode=0 Nov 21 15:42:30 crc kubenswrapper[4774]: I1121 15:42:30.451006 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-qhw6s" event={"ID":"f680c438-04e4-45a2-9996-f6668b99065d","Type":"ContainerDied","Data":"20d558a731002c77662906b604a64b55162745dc0934862d8be5a8cbd3988d77"} Nov 21 15:42:31 crc kubenswrapper[4774]: I1121 15:42:31.464947 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-dwldc" event={"ID":"fb9639ba-9284-43d2-8a1d-0354ae6d4d11","Type":"ContainerStarted","Data":"5166b0eba2ebdc23374ffc5e02fe661abd1fc2f62585d1dd04ac792933afc929"} Nov 21 15:42:31 crc kubenswrapper[4774]: I1121 15:42:31.468770 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-healthmanager-qhw6s" event={"ID":"f680c438-04e4-45a2-9996-f6668b99065d","Type":"ContainerStarted","Data":"dfbfc5ebe42108f4358e86535a9cdb1fcca85529b49e62778718d6e07a5412ec"} Nov 21 15:42:31 crc kubenswrapper[4774]: I1121 15:42:31.469930 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:31 crc kubenswrapper[4774]: I1121 15:42:31.471303 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-kppw9" event={"ID":"e3f74551-4d82-4d29-adc1-9116631e39c0","Type":"ContainerStarted","Data":"2435a5bebcb8f2f5b8a1307c93d2af6779bcfa269271a4cfa632c3c943c623cf"} Nov 21 15:42:31 crc kubenswrapper[4774]: I1121 15:42:31.504953 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-healthmanager-qhw6s" podStartSLOduration=8.504930931 podStartE2EDuration="8.504930931s" podCreationTimestamp="2025-11-21 15:42:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:42:31.504019825 +0000 UTC m=+5942.156219084" watchObservedRunningTime="2025-11-21 15:42:31.504930931 +0000 UTC m=+5942.157130190" Nov 21 15:42:32 crc kubenswrapper[4774]: I1121 15:42:32.482350 4774 generic.go:334] "Generic (PLEG): container finished" podID="e3f74551-4d82-4d29-adc1-9116631e39c0" 
containerID="2435a5bebcb8f2f5b8a1307c93d2af6779bcfa269271a4cfa632c3c943c623cf" exitCode=0 Nov 21 15:42:32 crc kubenswrapper[4774]: I1121 15:42:32.482414 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-kppw9" event={"ID":"e3f74551-4d82-4d29-adc1-9116631e39c0","Type":"ContainerDied","Data":"2435a5bebcb8f2f5b8a1307c93d2af6779bcfa269271a4cfa632c3c943c623cf"} Nov 21 15:42:32 crc kubenswrapper[4774]: I1121 15:42:32.484476 4774 generic.go:334] "Generic (PLEG): container finished" podID="fb9639ba-9284-43d2-8a1d-0354ae6d4d11" containerID="5166b0eba2ebdc23374ffc5e02fe661abd1fc2f62585d1dd04ac792933afc929" exitCode=0 Nov 21 15:42:32 crc kubenswrapper[4774]: I1121 15:42:32.485691 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-dwldc" event={"ID":"fb9639ba-9284-43d2-8a1d-0354ae6d4d11","Type":"ContainerDied","Data":"5166b0eba2ebdc23374ffc5e02fe661abd1fc2f62585d1dd04ac792933afc929"} Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.130833 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sts8t"] Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.135700 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.145140 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sts8t"] Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.232954 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-utilities\") pod \"redhat-operators-sts8t\" (UID: \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\") " pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.233089 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-catalog-content\") pod \"redhat-operators-sts8t\" (UID: \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\") " pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.233307 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxxzb\" (UniqueName: \"kubernetes.io/projected/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-kube-api-access-pxxzb\") pod \"redhat-operators-sts8t\" (UID: \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\") " pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.334633 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-utilities\") pod \"redhat-operators-sts8t\" (UID: \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\") " pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.334697 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-catalog-content\") pod \"redhat-operators-sts8t\" (UID: \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\") " pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.334806 4774 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxxzb\" (UniqueName: \"kubernetes.io/projected/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-kube-api-access-pxxzb\") pod \"redhat-operators-sts8t\" (UID: \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\") " pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.335124 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-utilities\") pod \"redhat-operators-sts8t\" (UID: \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\") " pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.335313 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-catalog-content\") pod \"redhat-operators-sts8t\" (UID: \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\") " pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.356660 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxxzb\" (UniqueName: \"kubernetes.io/projected/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-kube-api-access-pxxzb\") pod \"redhat-operators-sts8t\" (UID: \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\") " pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.457730 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.501175 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-housekeeping-kppw9" event={"ID":"e3f74551-4d82-4d29-adc1-9116631e39c0","Type":"ContainerStarted","Data":"a8508c054c08386e13fe354ce8d2c333d6b67a8f9ef2894c930923e936c915b2"} Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.502602 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.505088 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/octavia-worker-dwldc" event={"ID":"fb9639ba-9284-43d2-8a1d-0354ae6d4d11","Type":"ContainerStarted","Data":"0c7e47194370268e14ab232dc92bcc252f6356c3e1178c71465e9f6100de23c0"} Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.505959 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/octavia-worker-dwldc" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.566103 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-housekeeping-kppw9" podStartSLOduration=4.565525299 podStartE2EDuration="9.566081108s" podCreationTimestamp="2025-11-21 15:42:24 +0000 UTC" firstStartedPulling="2025-11-21 15:42:25.796878985 +0000 UTC m=+5936.449078264" lastFinishedPulling="2025-11-21 15:42:30.797434814 +0000 UTC m=+5941.449634073" observedRunningTime="2025-11-21 15:42:33.531597495 +0000 UTC m=+5944.183796754" watchObservedRunningTime="2025-11-21 15:42:33.566081108 +0000 UTC m=+5944.218280367" Nov 21 15:42:33 crc kubenswrapper[4774]: I1121 15:42:33.576593 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/octavia-worker-dwldc" podStartSLOduration=3.767154603 podStartE2EDuration="7.576566957s" podCreationTimestamp="2025-11-21 15:42:26 +0000 UTC" 
firstStartedPulling="2025-11-21 15:42:27.00555499 +0000 UTC m=+5937.657754259" lastFinishedPulling="2025-11-21 15:42:30.814967354 +0000 UTC m=+5941.467166613" observedRunningTime="2025-11-21 15:42:33.562357901 +0000 UTC m=+5944.214557180" watchObservedRunningTime="2025-11-21 15:42:33.576566957 +0000 UTC m=+5944.228766226" Nov 21 15:42:34 crc kubenswrapper[4774]: I1121 15:42:34.050374 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sts8t"] Nov 21 15:42:34 crc kubenswrapper[4774]: I1121 15:42:34.518869 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sts8t" event={"ID":"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb","Type":"ContainerStarted","Data":"1627940140f5e4c78c522eb86caed2a9234efb48b0d0e40345b7dab64a753fc8"} Nov 21 15:42:35 crc kubenswrapper[4774]: I1121 15:42:35.530453 4774 generic.go:334] "Generic (PLEG): container finished" podID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" containerID="b9a0f9906c567fda0b43996fa5b58f6f7d445691986bc664441cd4b9bdd51e8e" exitCode=0 Nov 21 15:42:35 crc kubenswrapper[4774]: I1121 15:42:35.530512 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sts8t" event={"ID":"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb","Type":"ContainerDied","Data":"b9a0f9906c567fda0b43996fa5b58f6f7d445691986bc664441cd4b9bdd51e8e"} Nov 21 15:42:36 crc kubenswrapper[4774]: I1121 15:42:36.094133 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:42:36 crc kubenswrapper[4774]: E1121 15:42:36.094988 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:42:38 crc kubenswrapper[4774]: I1121 15:42:38.586883 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sts8t" event={"ID":"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb","Type":"ContainerStarted","Data":"be1ec2670ce439945914ac2e62c71e6cd2295b004e8ec92831909aced48f1923"} Nov 21 15:42:38 crc kubenswrapper[4774]: I1121 15:42:38.862720 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-healthmanager-qhw6s" Nov 21 15:42:39 crc kubenswrapper[4774]: I1121 15:42:39.598557 4774 generic.go:334] "Generic (PLEG): container finished" podID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" containerID="be1ec2670ce439945914ac2e62c71e6cd2295b004e8ec92831909aced48f1923" exitCode=0 Nov 21 15:42:39 crc kubenswrapper[4774]: I1121 15:42:39.598635 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sts8t" event={"ID":"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb","Type":"ContainerDied","Data":"be1ec2670ce439945914ac2e62c71e6cd2295b004e8ec92831909aced48f1923"} Nov 21 15:42:40 crc kubenswrapper[4774]: I1121 15:42:40.279127 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-housekeeping-kppw9" Nov 21 15:42:40 crc kubenswrapper[4774]: I1121 15:42:40.612500 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sts8t" 
event={"ID":"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb","Type":"ContainerStarted","Data":"110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5"} Nov 21 15:42:41 crc kubenswrapper[4774]: I1121 15:42:41.461877 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/octavia-worker-dwldc" Nov 21 15:42:41 crc kubenswrapper[4774]: I1121 15:42:41.640708 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sts8t" podStartSLOduration=4.18321541 podStartE2EDuration="8.640690286s" podCreationTimestamp="2025-11-21 15:42:33 +0000 UTC" firstStartedPulling="2025-11-21 15:42:35.532919925 +0000 UTC m=+5946.185119184" lastFinishedPulling="2025-11-21 15:42:39.990394801 +0000 UTC m=+5950.642594060" observedRunningTime="2025-11-21 15:42:41.636237339 +0000 UTC m=+5952.288436618" watchObservedRunningTime="2025-11-21 15:42:41.640690286 +0000 UTC m=+5952.292889545" Nov 21 15:42:43 crc kubenswrapper[4774]: I1121 15:42:43.458876 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:43 crc kubenswrapper[4774]: I1121 15:42:43.459326 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:44 crc kubenswrapper[4774]: I1121 15:42:44.532595 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-sts8t" podUID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" containerName="registry-server" probeResult="failure" output=< Nov 21 15:42:44 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 15:42:44 crc kubenswrapper[4774]: > Nov 21 15:42:50 crc kubenswrapper[4774]: I1121 15:42:50.108560 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:42:50 crc kubenswrapper[4774]: E1121 15:42:50.109301 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:42:53 crc kubenswrapper[4774]: I1121 15:42:53.506953 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:53 crc kubenswrapper[4774]: I1121 15:42:53.565917 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:53 crc kubenswrapper[4774]: I1121 15:42:53.742289 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sts8t"] Nov 21 15:42:54 crc kubenswrapper[4774]: I1121 15:42:54.748108 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sts8t" podUID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" containerName="registry-server" containerID="cri-o://110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5" gracePeriod=2 Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.178011 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.179520 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-catalog-content\") pod \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\" (UID: \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\") " Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.179601 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-utilities\") pod \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\" (UID: \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\") " Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.179710 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pxxzb\" (UniqueName: \"kubernetes.io/projected/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-kube-api-access-pxxzb\") pod \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\" (UID: \"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb\") " Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.180808 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-utilities" (OuterVolumeSpecName: "utilities") pod "e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" (UID: "e7da03aa-595a-4b94-84e0-fa8cb1f13fbb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.183474 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.197947 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-kube-api-access-pxxzb" (OuterVolumeSpecName: "kube-api-access-pxxzb") pod "e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" (UID: "e7da03aa-595a-4b94-84e0-fa8cb1f13fbb"). InnerVolumeSpecName "kube-api-access-pxxzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.265917 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" (UID: "e7da03aa-595a-4b94-84e0-fa8cb1f13fbb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.285902 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.285940 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pxxzb\" (UniqueName: \"kubernetes.io/projected/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb-kube-api-access-pxxzb\") on node \"crc\" DevicePath \"\"" Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.762258 4774 generic.go:334] "Generic (PLEG): container finished" podID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" containerID="110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5" exitCode=0 Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.762301 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sts8t" event={"ID":"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb","Type":"ContainerDied","Data":"110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5"} Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.762334 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sts8t" event={"ID":"e7da03aa-595a-4b94-84e0-fa8cb1f13fbb","Type":"ContainerDied","Data":"1627940140f5e4c78c522eb86caed2a9234efb48b0d0e40345b7dab64a753fc8"} Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.762339 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sts8t" Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.762355 4774 scope.go:117] "RemoveContainer" containerID="110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5" Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.799296 4774 scope.go:117] "RemoveContainer" containerID="be1ec2670ce439945914ac2e62c71e6cd2295b004e8ec92831909aced48f1923" Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.803310 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sts8t"] Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.812280 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sts8t"] Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.854298 4774 scope.go:117] "RemoveContainer" containerID="b9a0f9906c567fda0b43996fa5b58f6f7d445691986bc664441cd4b9bdd51e8e" Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.890751 4774 scope.go:117] "RemoveContainer" containerID="110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5" Nov 21 15:42:55 crc kubenswrapper[4774]: E1121 15:42:55.891537 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5\": container with ID starting with 110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5 not found: ID does not exist" containerID="110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5" Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.891613 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5"} err="failed to get container status \"110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5\": 
rpc error: code = NotFound desc = could not find container \"110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5\": container with ID starting with 110ff8382f01c488514b357110fb1f1d9a0598007a2f46c43769c7519a62c0f5 not found: ID does not exist"
Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.891650 4774 scope.go:117] "RemoveContainer" containerID="be1ec2670ce439945914ac2e62c71e6cd2295b004e8ec92831909aced48f1923"
Nov 21 15:42:55 crc kubenswrapper[4774]: E1121 15:42:55.892526 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be1ec2670ce439945914ac2e62c71e6cd2295b004e8ec92831909aced48f1923\": container with ID starting with be1ec2670ce439945914ac2e62c71e6cd2295b004e8ec92831909aced48f1923 not found: ID does not exist" containerID="be1ec2670ce439945914ac2e62c71e6cd2295b004e8ec92831909aced48f1923"
Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.892574 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be1ec2670ce439945914ac2e62c71e6cd2295b004e8ec92831909aced48f1923"} err="failed to get container status \"be1ec2670ce439945914ac2e62c71e6cd2295b004e8ec92831909aced48f1923\": rpc error: code = NotFound desc = could not find container \"be1ec2670ce439945914ac2e62c71e6cd2295b004e8ec92831909aced48f1923\": container with ID starting with be1ec2670ce439945914ac2e62c71e6cd2295b004e8ec92831909aced48f1923 not found: ID does not exist"
Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.892602 4774 scope.go:117] "RemoveContainer" containerID="b9a0f9906c567fda0b43996fa5b58f6f7d445691986bc664441cd4b9bdd51e8e"
Nov 21 15:42:55 crc kubenswrapper[4774]: E1121 15:42:55.893135 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9a0f9906c567fda0b43996fa5b58f6f7d445691986bc664441cd4b9bdd51e8e\": container with ID starting with b9a0f9906c567fda0b43996fa5b58f6f7d445691986bc664441cd4b9bdd51e8e not found: ID does not exist" containerID="b9a0f9906c567fda0b43996fa5b58f6f7d445691986bc664441cd4b9bdd51e8e"
Nov 21 15:42:55 crc kubenswrapper[4774]: I1121 15:42:55.893181 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9a0f9906c567fda0b43996fa5b58f6f7d445691986bc664441cd4b9bdd51e8e"} err="failed to get container status \"b9a0f9906c567fda0b43996fa5b58f6f7d445691986bc664441cd4b9bdd51e8e\": rpc error: code = NotFound desc = could not find container \"b9a0f9906c567fda0b43996fa5b58f6f7d445691986bc664441cd4b9bdd51e8e\": container with ID starting with b9a0f9906c567fda0b43996fa5b58f6f7d445691986bc664441cd4b9bdd51e8e not found: ID does not exist"
Nov 21 15:42:56 crc kubenswrapper[4774]: I1121 15:42:56.105188 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" path="/var/lib/kubelet/pods/e7da03aa-595a-4b94-84e0-fa8cb1f13fbb/volumes"
Nov 21 15:43:03 crc kubenswrapper[4774]: I1121 15:43:03.053365 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-fbwmf"]
Nov 21 15:43:03 crc kubenswrapper[4774]: I1121 15:43:03.070335 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-ca6d-account-create-dkgrf"]
Nov 21 15:43:03 crc kubenswrapper[4774]: I1121 15:43:03.082268 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-fbwmf"]
Nov 21 15:43:03 crc kubenswrapper[4774]: I1121 15:43:03.095199 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-ca6d-account-create-dkgrf"]
Nov 21 15:43:04 crc kubenswrapper[4774]: I1121 15:43:04.093638 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70"
Nov 21 15:43:04 crc kubenswrapper[4774]: E1121 15:43:04.094442 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:43:04 crc kubenswrapper[4774]: I1121 15:43:04.111413 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d410396-7f33-4931-aea3-fc85b62814f0" path="/var/lib/kubelet/pods/6d410396-7f33-4931-aea3-fc85b62814f0/volumes"
Nov 21 15:43:04 crc kubenswrapper[4774]: I1121 15:43:04.112497 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a58f797a-f829-44c8-8339-90e553652d85" path="/var/lib/kubelet/pods/a58f797a-f829-44c8-8339-90e553652d85/volumes"
Nov 21 15:43:10 crc kubenswrapper[4774]: I1121 15:43:10.041695 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-5ttt7"]
Nov 21 15:43:10 crc kubenswrapper[4774]: I1121 15:43:10.057879 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-5ttt7"]
Nov 21 15:43:10 crc kubenswrapper[4774]: I1121 15:43:10.112203 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fd77432-caee-4eca-a8b0-f10e61ba68c5" path="/var/lib/kubelet/pods/6fd77432-caee-4eca-a8b0-f10e61ba68c5/volumes"
Nov 21 15:43:15 crc kubenswrapper[4774]: I1121 15:43:15.095259 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70"
Nov 21 15:43:15 crc kubenswrapper[4774]: E1121 15:43:15.096175 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:43:26 crc kubenswrapper[4774]: I1121 15:43:26.093359 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70"
Nov 21 15:43:26 crc kubenswrapper[4774]: E1121 15:43:26.094428 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.710097 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6878d4cff5-g8xtp"]
Nov 21 15:43:35 crc kubenswrapper[4774]: E1121 15:43:35.711155 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" containerName="extract-utilities"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.711173 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" containerName="extract-utilities"
Nov 21 15:43:35 crc kubenswrapper[4774]: E1121 15:43:35.711198 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" containerName="registry-server"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.711206 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" containerName="registry-server"
Nov 21 15:43:35 crc kubenswrapper[4774]: E1121 15:43:35.711220 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" containerName="extract-content"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.711227 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" containerName="extract-content"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.711512 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7da03aa-595a-4b94-84e0-fa8cb1f13fbb" containerName="registry-server"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.712784 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.715585 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.731378 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.731616 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-fx4hz"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.736537 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6878d4cff5-g8xtp"]
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.737756 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.777882 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.778468 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="01940ba5-c2da-40b4-aa60-e07998ef2bb0" containerName="glance-log" containerID="cri-o://36a7ad8b954f5c270872626ea4bfbc98f113fd14ce768d0617ab3476f0de6ba9" gracePeriod=30
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.778539 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="01940ba5-c2da-40b4-aa60-e07998ef2bb0" containerName="glance-httpd" containerID="cri-o://c22eba4bc0a1620eb9dcf19688edc011d7205ff5b7093ca303575e24789b6e22" gracePeriod=30
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.821160 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5338fd26-b1ac-479e-b193-171b2b3f09bc-horizon-secret-key\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.821306 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5338fd26-b1ac-479e-b193-171b2b3f09bc-scripts\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.821338 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5338fd26-b1ac-479e-b193-171b2b3f09bc-config-data\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.821366 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vfsk\" (UniqueName: \"kubernetes.io/projected/5338fd26-b1ac-479e-b193-171b2b3f09bc-kube-api-access-4vfsk\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.821392 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5338fd26-b1ac-479e-b193-171b2b3f09bc-logs\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.873940 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.874220 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6315b9aa-80dc-44e1-860b-64228934f3c4" containerName="glance-log" containerID="cri-o://5e5877512b194f9170acb775272cc9142553c683e10c1b67ce473e218d02c0e9" gracePeriod=30
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.874669 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="6315b9aa-80dc-44e1-860b-64228934f3c4" containerName="glance-httpd" containerID="cri-o://66bc77eba788a555a7827510ba663b7087faa84356832825035b64638db03638" gracePeriod=30
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.897438 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-644d85c745-gsdh6"]
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.899648 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.908186 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-644d85c745-gsdh6"]
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.928761 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5338fd26-b1ac-479e-b193-171b2b3f09bc-scripts\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.928932 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5338fd26-b1ac-479e-b193-171b2b3f09bc-config-data\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.928991 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vfsk\" (UniqueName: \"kubernetes.io/projected/5338fd26-b1ac-479e-b193-171b2b3f09bc-kube-api-access-4vfsk\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.929029 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5338fd26-b1ac-479e-b193-171b2b3f09bc-logs\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.929216 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5338fd26-b1ac-479e-b193-171b2b3f09bc-horizon-secret-key\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.929602 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5338fd26-b1ac-479e-b193-171b2b3f09bc-logs\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.929923 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5338fd26-b1ac-479e-b193-171b2b3f09bc-scripts\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.931852 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5338fd26-b1ac-479e-b193-171b2b3f09bc-config-data\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.941561 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5338fd26-b1ac-479e-b193-171b2b3f09bc-horizon-secret-key\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:35 crc kubenswrapper[4774]: I1121 15:43:35.950246 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vfsk\" (UniqueName: \"kubernetes.io/projected/5338fd26-b1ac-479e-b193-171b2b3f09bc-kube-api-access-4vfsk\") pod \"horizon-6878d4cff5-g8xtp\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.031500 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0ec0af3-99c8-430d-b20d-f337375960d5-scripts\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.031859 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d0ec0af3-99c8-430d-b20d-f337375960d5-config-data\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.031894 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5xg4\" (UniqueName: \"kubernetes.io/projected/d0ec0af3-99c8-430d-b20d-f337375960d5-kube-api-access-s5xg4\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.032043 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d0ec0af3-99c8-430d-b20d-f337375960d5-horizon-secret-key\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.032084 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0ec0af3-99c8-430d-b20d-f337375960d5-logs\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.052124 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6878d4cff5-g8xtp"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.133602 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d0ec0af3-99c8-430d-b20d-f337375960d5-config-data\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.133649 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5xg4\" (UniqueName: \"kubernetes.io/projected/d0ec0af3-99c8-430d-b20d-f337375960d5-kube-api-access-s5xg4\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.133762 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d0ec0af3-99c8-430d-b20d-f337375960d5-horizon-secret-key\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.133793 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0ec0af3-99c8-430d-b20d-f337375960d5-logs\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.133853 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0ec0af3-99c8-430d-b20d-f337375960d5-scripts\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.134641 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0ec0af3-99c8-430d-b20d-f337375960d5-scripts\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.134849 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0ec0af3-99c8-430d-b20d-f337375960d5-logs\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.135200 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d0ec0af3-99c8-430d-b20d-f337375960d5-config-data\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.141897 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d0ec0af3-99c8-430d-b20d-f337375960d5-horizon-secret-key\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.144608 4774 generic.go:334] "Generic (PLEG): container finished" podID="6315b9aa-80dc-44e1-860b-64228934f3c4" containerID="5e5877512b194f9170acb775272cc9142553c683e10c1b67ce473e218d02c0e9" exitCode=143
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.144672 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6315b9aa-80dc-44e1-860b-64228934f3c4","Type":"ContainerDied","Data":"5e5877512b194f9170acb775272cc9142553c683e10c1b67ce473e218d02c0e9"}
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.162767 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5xg4\" (UniqueName: \"kubernetes.io/projected/d0ec0af3-99c8-430d-b20d-f337375960d5-kube-api-access-s5xg4\") pod \"horizon-644d85c745-gsdh6\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.169535 4774 generic.go:334] "Generic (PLEG): container finished" podID="01940ba5-c2da-40b4-aa60-e07998ef2bb0" containerID="36a7ad8b954f5c270872626ea4bfbc98f113fd14ce768d0617ab3476f0de6ba9" exitCode=143
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.169585 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"01940ba5-c2da-40b4-aa60-e07998ef2bb0","Type":"ContainerDied","Data":"36a7ad8b954f5c270872626ea4bfbc98f113fd14ce768d0617ab3476f0de6ba9"}
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.327467 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-644d85c745-gsdh6"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.465732 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-644d85c745-gsdh6"]
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.509582 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7b68ccc75c-xqkjc"]
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.511160 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.543796 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b68ccc75c-xqkjc"]
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.657984 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/beddfe34-7178-4f7d-9428-ebd52715e910-config-data\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.658248 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/beddfe34-7178-4f7d-9428-ebd52715e910-scripts\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.658405 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/beddfe34-7178-4f7d-9428-ebd52715e910-logs\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.658539 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vd4nn\" (UniqueName: \"kubernetes.io/projected/beddfe34-7178-4f7d-9428-ebd52715e910-kube-api-access-vd4nn\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.658699 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/beddfe34-7178-4f7d-9428-ebd52715e910-horizon-secret-key\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.661574 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6878d4cff5-g8xtp"]
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.760687 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/beddfe34-7178-4f7d-9428-ebd52715e910-scripts\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.760761 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/beddfe34-7178-4f7d-9428-ebd52715e910-logs\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.760814 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vd4nn\" (UniqueName: \"kubernetes.io/projected/beddfe34-7178-4f7d-9428-ebd52715e910-kube-api-access-vd4nn\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.760891 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/beddfe34-7178-4f7d-9428-ebd52715e910-horizon-secret-key\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.761236 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/beddfe34-7178-4f7d-9428-ebd52715e910-config-data\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.761353 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/beddfe34-7178-4f7d-9428-ebd52715e910-logs\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.761637 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/beddfe34-7178-4f7d-9428-ebd52715e910-scripts\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.762584 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/beddfe34-7178-4f7d-9428-ebd52715e910-config-data\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.766294 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/beddfe34-7178-4f7d-9428-ebd52715e910-horizon-secret-key\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.777189 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vd4nn\" (UniqueName: \"kubernetes.io/projected/beddfe34-7178-4f7d-9428-ebd52715e910-kube-api-access-vd4nn\") pod \"horizon-7b68ccc75c-xqkjc\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.856464 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b68ccc75c-xqkjc"
Nov 21 15:43:36 crc kubenswrapper[4774]: I1121 15:43:36.895279 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-644d85c745-gsdh6"]
Nov 21 15:43:37 crc kubenswrapper[4774]: I1121 15:43:37.057930 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-711e-account-create-8s4fd"]
Nov 21 15:43:37 crc kubenswrapper[4774]: I1121 15:43:37.070008 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-z5bzr"]
Nov 21 15:43:37 crc kubenswrapper[4774]: I1121 15:43:37.082034 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-711e-account-create-8s4fd"]
Nov 21 15:43:37 crc kubenswrapper[4774]: I1121 15:43:37.092167 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-z5bzr"]
Nov 21 15:43:37 crc kubenswrapper[4774]: I1121 15:43:37.182047 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644d85c745-gsdh6" event={"ID":"d0ec0af3-99c8-430d-b20d-f337375960d5","Type":"ContainerStarted","Data":"c91f4ed1a9bafc3ce464b5c0e807f35920467400eb0d8b91e8a07879ab47c036"}
Nov 21 15:43:37 crc kubenswrapper[4774]: I1121 15:43:37.183617 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6878d4cff5-g8xtp" event={"ID":"5338fd26-b1ac-479e-b193-171b2b3f09bc","Type":"ContainerStarted","Data":"46b5e5ab06e2db632a14eca293571d735f8d7901b45e18e9dfca1085db2185e9"}
Nov 21 15:43:37 crc kubenswrapper[4774]: I1121 15:43:37.360461 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7b68ccc75c-xqkjc"]
Nov 21 15:43:37 crc kubenswrapper[4774]: W1121 15:43:37.363916 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbeddfe34_7178_4f7d_9428_ebd52715e910.slice/crio-4a5f5c8e32ca1d4b3e3552302d280c9891beb1566049d5ec6c185bb044b3a8d2 WatchSource:0}: Error finding container 4a5f5c8e32ca1d4b3e3552302d280c9891beb1566049d5ec6c185bb044b3a8d2: Status 404 returned error can't find the container with id 4a5f5c8e32ca1d4b3e3552302d280c9891beb1566049d5ec6c185bb044b3a8d2
Nov 21 15:43:38 crc kubenswrapper[4774]: I1121 15:43:38.110420 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e295507-fc0d-4844-97a7-7121be4456ad" path="/var/lib/kubelet/pods/7e295507-fc0d-4844-97a7-7121be4456ad/volumes"
Nov 21 15:43:38 crc kubenswrapper[4774]: I1121 15:43:38.112526 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a73a54d7-8b04-46ea-ad8c-a4b4f6d57835" path="/var/lib/kubelet/pods/a73a54d7-8b04-46ea-ad8c-a4b4f6d57835/volumes"
Nov 21 15:43:38 crc kubenswrapper[4774]: I1121 15:43:38.195809 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b68ccc75c-xqkjc" event={"ID":"beddfe34-7178-4f7d-9428-ebd52715e910","Type":"ContainerStarted","Data":"4a5f5c8e32ca1d4b3e3552302d280c9891beb1566049d5ec6c185bb044b3a8d2"}
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.222937 4774 generic.go:334] "Generic (PLEG): container finished" podID="6315b9aa-80dc-44e1-860b-64228934f3c4" containerID="66bc77eba788a555a7827510ba663b7087faa84356832825035b64638db03638" exitCode=0
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.222999 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6315b9aa-80dc-44e1-860b-64228934f3c4","Type":"ContainerDied","Data":"66bc77eba788a555a7827510ba663b7087faa84356832825035b64638db03638"}
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.225612 4774 generic.go:334] "Generic (PLEG): container finished" podID="01940ba5-c2da-40b4-aa60-e07998ef2bb0" containerID="c22eba4bc0a1620eb9dcf19688edc011d7205ff5b7093ca303575e24789b6e22" exitCode=0
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.225661 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"01940ba5-c2da-40b4-aa60-e07998ef2bb0","Type":"ContainerDied","Data":"c22eba4bc0a1620eb9dcf19688edc011d7205ff5b7093ca303575e24789b6e22"}
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.503810 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.627732 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/01940ba5-c2da-40b4-aa60-e07998ef2bb0-httpd-run\") pod \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.627855 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dc6s\" (UniqueName: \"kubernetes.io/projected/01940ba5-c2da-40b4-aa60-e07998ef2bb0-kube-api-access-6dc6s\") pod \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.627981 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01940ba5-c2da-40b4-aa60-e07998ef2bb0-logs\") pod \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.628031 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-config-data\") pod \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.628051 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-combined-ca-bundle\") pod \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.628089 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/01940ba5-c2da-40b4-aa60-e07998ef2bb0-ceph\") pod \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.628116 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-scripts\") pod \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\" (UID: \"01940ba5-c2da-40b4-aa60-e07998ef2bb0\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.630130 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01940ba5-c2da-40b4-aa60-e07998ef2bb0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "01940ba5-c2da-40b4-aa60-e07998ef2bb0" (UID: "01940ba5-c2da-40b4-aa60-e07998ef2bb0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.630070 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01940ba5-c2da-40b4-aa60-e07998ef2bb0-logs" (OuterVolumeSpecName: "logs") pod "01940ba5-c2da-40b4-aa60-e07998ef2bb0" (UID: "01940ba5-c2da-40b4-aa60-e07998ef2bb0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.634944 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-scripts" (OuterVolumeSpecName: "scripts") pod "01940ba5-c2da-40b4-aa60-e07998ef2bb0" (UID: "01940ba5-c2da-40b4-aa60-e07998ef2bb0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.636049 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01940ba5-c2da-40b4-aa60-e07998ef2bb0-ceph" (OuterVolumeSpecName: "ceph") pod "01940ba5-c2da-40b4-aa60-e07998ef2bb0" (UID: "01940ba5-c2da-40b4-aa60-e07998ef2bb0"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.636232 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01940ba5-c2da-40b4-aa60-e07998ef2bb0-kube-api-access-6dc6s" (OuterVolumeSpecName: "kube-api-access-6dc6s") pod "01940ba5-c2da-40b4-aa60-e07998ef2bb0" (UID: "01940ba5-c2da-40b4-aa60-e07998ef2bb0"). InnerVolumeSpecName "kube-api-access-6dc6s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.662929 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.696470 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "01940ba5-c2da-40b4-aa60-e07998ef2bb0" (UID: "01940ba5-c2da-40b4-aa60-e07998ef2bb0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.727926 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-config-data" (OuterVolumeSpecName: "config-data") pod "01940ba5-c2da-40b4-aa60-e07998ef2bb0" (UID: "01940ba5-c2da-40b4-aa60-e07998ef2bb0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.730221 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.730258 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.730275 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/01940ba5-c2da-40b4-aa60-e07998ef2bb0-ceph\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.730286 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01940ba5-c2da-40b4-aa60-e07998ef2bb0-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.730295 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/01940ba5-c2da-40b4-aa60-e07998ef2bb0-httpd-run\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.730304 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dc6s\" (UniqueName: \"kubernetes.io/projected/01940ba5-c2da-40b4-aa60-e07998ef2bb0-kube-api-access-6dc6s\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.730313 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01940ba5-c2da-40b4-aa60-e07998ef2bb0-logs\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.831423 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6315b9aa-80dc-44e1-860b-64228934f3c4-httpd-run\") pod \"6315b9aa-80dc-44e1-860b-64228934f3c4\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.831514 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6315b9aa-80dc-44e1-860b-64228934f3c4-logs\") pod \"6315b9aa-80dc-44e1-860b-64228934f3c4\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.831561 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cb4tm\" (UniqueName: \"kubernetes.io/projected/6315b9aa-80dc-44e1-860b-64228934f3c4-kube-api-access-cb4tm\") pod \"6315b9aa-80dc-44e1-860b-64228934f3c4\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.831593 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6315b9aa-80dc-44e1-860b-64228934f3c4-ceph\") pod \"6315b9aa-80dc-44e1-860b-64228934f3c4\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.831647 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-scripts\") pod \"6315b9aa-80dc-44e1-860b-64228934f3c4\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.831693 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-config-data\") pod \"6315b9aa-80dc-44e1-860b-64228934f3c4\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.831799 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-combined-ca-bundle\") pod \"6315b9aa-80dc-44e1-860b-64228934f3c4\" (UID: \"6315b9aa-80dc-44e1-860b-64228934f3c4\") "
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.833322 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6315b9aa-80dc-44e1-860b-64228934f3c4-logs" (OuterVolumeSpecName: "logs") pod "6315b9aa-80dc-44e1-860b-64228934f3c4" (UID: "6315b9aa-80dc-44e1-860b-64228934f3c4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.833310 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6315b9aa-80dc-44e1-860b-64228934f3c4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6315b9aa-80dc-44e1-860b-64228934f3c4" (UID: "6315b9aa-80dc-44e1-860b-64228934f3c4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.836224 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6315b9aa-80dc-44e1-860b-64228934f3c4-ceph" (OuterVolumeSpecName: "ceph") pod "6315b9aa-80dc-44e1-860b-64228934f3c4" (UID: "6315b9aa-80dc-44e1-860b-64228934f3c4"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.836754 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-scripts" (OuterVolumeSpecName: "scripts") pod "6315b9aa-80dc-44e1-860b-64228934f3c4" (UID: "6315b9aa-80dc-44e1-860b-64228934f3c4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.837068 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6315b9aa-80dc-44e1-860b-64228934f3c4-kube-api-access-cb4tm" (OuterVolumeSpecName: "kube-api-access-cb4tm") pod "6315b9aa-80dc-44e1-860b-64228934f3c4" (UID: "6315b9aa-80dc-44e1-860b-64228934f3c4"). InnerVolumeSpecName "kube-api-access-cb4tm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.863315 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6315b9aa-80dc-44e1-860b-64228934f3c4" (UID: "6315b9aa-80dc-44e1-860b-64228934f3c4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.884514 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-config-data" (OuterVolumeSpecName: "config-data") pod "6315b9aa-80dc-44e1-860b-64228934f3c4" (UID: "6315b9aa-80dc-44e1-860b-64228934f3c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.933998 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.934029 4774 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6315b9aa-80dc-44e1-860b-64228934f3c4-httpd-run\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.934038 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6315b9aa-80dc-44e1-860b-64228934f3c4-logs\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.934047 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cb4tm\" (UniqueName: \"kubernetes.io/projected/6315b9aa-80dc-44e1-860b-64228934f3c4-kube-api-access-cb4tm\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.934056 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/6315b9aa-80dc-44e1-860b-64228934f3c4-ceph\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.934066 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:39 crc kubenswrapper[4774]: I1121 15:43:39.934073 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6315b9aa-80dc-44e1-860b-64228934f3c4-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.112880 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70"
Nov 21 15:43:40 crc kubenswrapper[4774]: E1121 15:43:40.113203 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.252049 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"6315b9aa-80dc-44e1-860b-64228934f3c4","Type":"ContainerDied","Data":"fcae138905ccf1c634d2695f1251f185f1e5587560d985a0b8ec4afdc912660e"}
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.252112 4774 scope.go:117] "RemoveContainer" containerID="66bc77eba788a555a7827510ba663b7087faa84356832825035b64638db03638"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.252267 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.266465 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"01940ba5-c2da-40b4-aa60-e07998ef2bb0","Type":"ContainerDied","Data":"88f234ae082d0900aaed5fc58d34efa0b68b9ac86b46b8ab6941c53793b334fa"}
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.266551 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.296544 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.316692 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"]
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.339908 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.357291 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.380958 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Nov 21 15:43:40 crc kubenswrapper[4774]: E1121 15:43:40.381433 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01940ba5-c2da-40b4-aa60-e07998ef2bb0" containerName="glance-httpd"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.381451 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="01940ba5-c2da-40b4-aa60-e07998ef2bb0" containerName="glance-httpd"
Nov 21 15:43:40 crc kubenswrapper[4774]: E1121 15:43:40.381464 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6315b9aa-80dc-44e1-860b-64228934f3c4" containerName="glance-httpd"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.381471 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6315b9aa-80dc-44e1-860b-64228934f3c4" containerName="glance-httpd"
Nov 21 15:43:40 crc kubenswrapper[4774]: E1121 15:43:40.381484 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01940ba5-c2da-40b4-aa60-e07998ef2bb0" containerName="glance-log"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.381490 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="01940ba5-c2da-40b4-aa60-e07998ef2bb0" containerName="glance-log"
Nov 21 15:43:40 crc kubenswrapper[4774]: E1121 15:43:40.381528 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6315b9aa-80dc-44e1-860b-64228934f3c4" containerName="glance-log"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.381536 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="6315b9aa-80dc-44e1-860b-64228934f3c4" containerName="glance-log"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.381726 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6315b9aa-80dc-44e1-860b-64228934f3c4" containerName="glance-log"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.381739 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="01940ba5-c2da-40b4-aa60-e07998ef2bb0" containerName="glance-log"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.381750 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="01940ba5-c2da-40b4-aa60-e07998ef2bb0" containerName="glance-httpd"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.381761 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="6315b9aa-80dc-44e1-860b-64228934f3c4" containerName="glance-httpd"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.382910 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.388174 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.388622 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-4l7bh"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.388744 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.403971 4774 scope.go:117] "RemoveContainer" containerID="5e5877512b194f9170acb775272cc9142553c683e10c1b67ce473e218d02c0e9"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.427082 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.439352 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.442240 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.459893 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.476889 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.520955 4774 scope.go:117] "RemoveContainer" containerID="c22eba4bc0a1620eb9dcf19688edc011d7205ff5b7093ca303575e24789b6e22"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.554610 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da63df4b-d3aa-48c0-9d11-834c1c7e825b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.554986 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ed244fd-40a8-41fe-84dc-b291ec15dd87-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.555011 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da63df4b-d3aa-48c0-9d11-834c1c7e825b-logs\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.555050 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/da63df4b-d3aa-48c0-9d11-834c1c7e825b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.555071 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ed244fd-40a8-41fe-84dc-b291ec15dd87-scripts\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.555356 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/da63df4b-d3aa-48c0-9d11-834c1c7e825b-ceph\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.555739 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njztx\" (UniqueName: \"kubernetes.io/projected/da63df4b-d3aa-48c0-9d11-834c1c7e825b-kube-api-access-njztx\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.555896 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da63df4b-d3aa-48c0-9d11-834c1c7e825b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.556059 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3ed244fd-40a8-41fe-84dc-b291ec15dd87-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.556160 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f65l5\" (UniqueName: \"kubernetes.io/projected/3ed244fd-40a8-41fe-84dc-b291ec15dd87-kube-api-access-f65l5\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.556199 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da63df4b-d3aa-48c0-9d11-834c1c7e825b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.556227 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ed244fd-40a8-41fe-84dc-b291ec15dd87-config-data\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.556255 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3ed244fd-40a8-41fe-84dc-b291ec15dd87-ceph\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.556311 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ed244fd-40a8-41fe-84dc-b291ec15dd87-logs\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.563070 4774 scope.go:117] "RemoveContainer" containerID="36a7ad8b954f5c270872626ea4bfbc98f113fd14ce768d0617ab3476f0de6ba9"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659342 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3ed244fd-40a8-41fe-84dc-b291ec15dd87-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659451 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f65l5\" (UniqueName: \"kubernetes.io/projected/3ed244fd-40a8-41fe-84dc-b291ec15dd87-kube-api-access-f65l5\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659483 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da63df4b-d3aa-48c0-9d11-834c1c7e825b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659511 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ed244fd-40a8-41fe-84dc-b291ec15dd87-config-data\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659541 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3ed244fd-40a8-41fe-84dc-b291ec15dd87-ceph\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659577 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ed244fd-40a8-41fe-84dc-b291ec15dd87-logs\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659642 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da63df4b-d3aa-48c0-9d11-834c1c7e825b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659670 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ed244fd-40a8-41fe-84dc-b291ec15dd87-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659694 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da63df4b-d3aa-48c0-9d11-834c1c7e825b-logs\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659724 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/da63df4b-d3aa-48c0-9d11-834c1c7e825b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659747 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ed244fd-40a8-41fe-84dc-b291ec15dd87-scripts\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659793 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/da63df4b-d3aa-48c0-9d11-834c1c7e825b-ceph\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659871 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njztx\" (UniqueName: \"kubernetes.io/projected/da63df4b-d3aa-48c0-9d11-834c1c7e825b-kube-api-access-njztx\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659874 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3ed244fd-40a8-41fe-84dc-b291ec15dd87-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.659914 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da63df4b-d3aa-48c0-9d11-834c1c7e825b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.660301 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ed244fd-40a8-41fe-84dc-b291ec15dd87-logs\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.667500 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/da63df4b-d3aa-48c0-9d11-834c1c7e825b-logs\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.668597 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ed244fd-40a8-41fe-84dc-b291ec15dd87-scripts\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.669212 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/3ed244fd-40a8-41fe-84dc-b291ec15dd87-ceph\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.669530 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da63df4b-d3aa-48c0-9d11-834c1c7e825b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.669562 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da63df4b-d3aa-48c0-9d11-834c1c7e825b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.670130 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/da63df4b-d3aa-48c0-9d11-834c1c7e825b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.670234 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ed244fd-40a8-41fe-84dc-b291ec15dd87-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.670304 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ed244fd-40a8-41fe-84dc-b291ec15dd87-config-data\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.672195 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/da63df4b-d3aa-48c0-9d11-834c1c7e825b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0"
Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.672266 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/da63df4b-d3aa-48c0-9d11-834c1c7e825b-ceph\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") "
pod="openstack/glance-default-internal-api-0" Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.685078 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njztx\" (UniqueName: \"kubernetes.io/projected/da63df4b-d3aa-48c0-9d11-834c1c7e825b-kube-api-access-njztx\") pod \"glance-default-internal-api-0\" (UID: \"da63df4b-d3aa-48c0-9d11-834c1c7e825b\") " pod="openstack/glance-default-internal-api-0" Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.687262 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f65l5\" (UniqueName: \"kubernetes.io/projected/3ed244fd-40a8-41fe-84dc-b291ec15dd87-kube-api-access-f65l5\") pod \"glance-default-external-api-0\" (UID: \"3ed244fd-40a8-41fe-84dc-b291ec15dd87\") " pod="openstack/glance-default-external-api-0" Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.763285 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Nov 21 15:43:40 crc kubenswrapper[4774]: I1121 15:43:40.800580 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Nov 21 15:43:41 crc kubenswrapper[4774]: I1121 15:43:41.841186 4774 scope.go:117] "RemoveContainer" containerID="013a3eab950769e10a370cb2855c6cf818a27cac0ec6c87662c04a2ea1eac0da" Nov 21 15:43:42 crc kubenswrapper[4774]: I1121 15:43:42.105560 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01940ba5-c2da-40b4-aa60-e07998ef2bb0" path="/var/lib/kubelet/pods/01940ba5-c2da-40b4-aa60-e07998ef2bb0/volumes" Nov 21 15:43:42 crc kubenswrapper[4774]: I1121 15:43:42.106966 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6315b9aa-80dc-44e1-860b-64228934f3c4" path="/var/lib/kubelet/pods/6315b9aa-80dc-44e1-860b-64228934f3c4/volumes" Nov 21 15:43:45 crc kubenswrapper[4774]: I1121 15:43:45.546377 4774 scope.go:117] "RemoveContainer" containerID="9194ad9be5dc1830c57b82a75343406a75d24d9071113ce5ceb99061b778b111" Nov 21 15:43:45 crc kubenswrapper[4774]: I1121 15:43:45.814759 4774 scope.go:117] "RemoveContainer" containerID="dcb501b455e91a226563f55d1fd6d9404ef8342107aaced2f21f1dd7d08af0b8" Nov 21 15:43:45 crc kubenswrapper[4774]: I1121 15:43:45.870033 4774 scope.go:117] "RemoveContainer" containerID="bcc91eca8a5e0bba0e5e56e8d5e4b1a8e2ed67fdec89e6d3fcce007474476c2d" Nov 21 15:43:45 crc kubenswrapper[4774]: I1121 15:43:45.916840 4774 scope.go:117] "RemoveContainer" containerID="dd2deea2401320b381a43022556e48db3a44b54ef1e4cc8fbc90a6b1081d9a32" Nov 21 15:43:45 crc kubenswrapper[4774]: I1121 15:43:45.953385 4774 scope.go:117] "RemoveContainer" containerID="7b0f53413236437273b291bdcb17f3c75a24d41889d6b532e6a697e2e60d182c" Nov 21 15:43:45 crc kubenswrapper[4774]: I1121 15:43:45.980380 4774 scope.go:117] "RemoveContainer" containerID="86c679eb0f2aa8c651097aba371f01631f91789d72589c40973f19deb9f7592d" Nov 21 15:43:46 crc kubenswrapper[4774]: I1121 15:43:46.006393 4774 scope.go:117] "RemoveContainer" containerID="e82c98831c5e5fc913b361d4338f58a9eb3b6b86586401fd570a0ce46cd3c2ab" Nov 21 15:43:46 crc kubenswrapper[4774]: I1121 15:43:46.280173 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Nov 21 15:43:46 crc kubenswrapper[4774]: I1121 15:43:46.355200 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6878d4cff5-g8xtp" 
event={"ID":"5338fd26-b1ac-479e-b193-171b2b3f09bc","Type":"ContainerStarted","Data":"42d6bd695e199bfcdb1884d307a551bc95bddd0e1c9ebbd51a556455c0d20c63"} Nov 21 15:43:46 crc kubenswrapper[4774]: I1121 15:43:46.373916 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Nov 21 15:43:46 crc kubenswrapper[4774]: I1121 15:43:46.378044 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"3ed244fd-40a8-41fe-84dc-b291ec15dd87","Type":"ContainerStarted","Data":"5db54e94d8e9bf67bde711a592ef7ddcda2326f5a7f353303da9d34a5599787e"} Nov 21 15:43:46 crc kubenswrapper[4774]: I1121 15:43:46.381658 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644d85c745-gsdh6" event={"ID":"d0ec0af3-99c8-430d-b20d-f337375960d5","Type":"ContainerStarted","Data":"c587dd6664e16dc6c8f9c91ebb51e676a233207617e909016ab35e84007ec300"} Nov 21 15:43:46 crc kubenswrapper[4774]: I1121 15:43:46.388683 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b68ccc75c-xqkjc" event={"ID":"beddfe34-7178-4f7d-9428-ebd52715e910","Type":"ContainerStarted","Data":"ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c"} Nov 21 15:43:46 crc kubenswrapper[4774]: W1121 15:43:46.401044 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda63df4b_d3aa_48c0_9d11_834c1c7e825b.slice/crio-c95504341891cd4cd51c4fc5a35235c0e3f67fd3700f453dcf54414dcbad23b0 WatchSource:0}: Error finding container c95504341891cd4cd51c4fc5a35235c0e3f67fd3700f453dcf54414dcbad23b0: Status 404 returned error can't find the container with id c95504341891cd4cd51c4fc5a35235c0e3f67fd3700f453dcf54414dcbad23b0 Nov 21 15:43:46 crc kubenswrapper[4774]: I1121 15:43:46.420977 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7b68ccc75c-xqkjc" podStartSLOduration=2.126254892 podStartE2EDuration="10.420957325s" podCreationTimestamp="2025-11-21 15:43:36 +0000 UTC" firstStartedPulling="2025-11-21 15:43:37.367013029 +0000 UTC m=+6008.019212288" lastFinishedPulling="2025-11-21 15:43:45.661715452 +0000 UTC m=+6016.313914721" observedRunningTime="2025-11-21 15:43:46.414443289 +0000 UTC m=+6017.066642558" watchObservedRunningTime="2025-11-21 15:43:46.420957325 +0000 UTC m=+6017.073156584" Nov 21 15:43:46 crc kubenswrapper[4774]: I1121 15:43:46.857573 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7b68ccc75c-xqkjc" Nov 21 15:43:46 crc kubenswrapper[4774]: I1121 15:43:46.857875 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7b68ccc75c-xqkjc" Nov 21 15:43:47 crc kubenswrapper[4774]: I1121 15:43:47.405330 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6878d4cff5-g8xtp" event={"ID":"5338fd26-b1ac-479e-b193-171b2b3f09bc","Type":"ContainerStarted","Data":"24262b2bb12a39d7788af90994f29259b7132d7e56ff909ec3e15271f072095c"} Nov 21 15:43:47 crc kubenswrapper[4774]: I1121 15:43:47.408893 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b68ccc75c-xqkjc" event={"ID":"beddfe34-7178-4f7d-9428-ebd52715e910","Type":"ContainerStarted","Data":"8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26"} Nov 21 15:43:47 crc kubenswrapper[4774]: I1121 15:43:47.411180 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-external-api-0" event={"ID":"3ed244fd-40a8-41fe-84dc-b291ec15dd87","Type":"ContainerStarted","Data":"f3fc799e46ea9e03e1a0ccbbaf03a2b3ca4b04f8c40229c9bad5c955a68bdc11"} Nov 21 15:43:47 crc kubenswrapper[4774]: I1121 15:43:47.413030 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"da63df4b-d3aa-48c0-9d11-834c1c7e825b","Type":"ContainerStarted","Data":"5a727f10b5ba2bff3b5b700cf678f6ba818e9ebf6bd4537f6073e98812424153"} Nov 21 15:43:47 crc kubenswrapper[4774]: I1121 15:43:47.413088 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"da63df4b-d3aa-48c0-9d11-834c1c7e825b","Type":"ContainerStarted","Data":"c95504341891cd4cd51c4fc5a35235c0e3f67fd3700f453dcf54414dcbad23b0"} Nov 21 15:43:47 crc kubenswrapper[4774]: I1121 15:43:47.415225 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644d85c745-gsdh6" event={"ID":"d0ec0af3-99c8-430d-b20d-f337375960d5","Type":"ContainerStarted","Data":"dd01c5b637bb7e4ce7eb672c4654f3483df6035ed70e9c46a8081d6b7eb69e8b"} Nov 21 15:43:47 crc kubenswrapper[4774]: I1121 15:43:47.415370 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-644d85c745-gsdh6" podUID="d0ec0af3-99c8-430d-b20d-f337375960d5" containerName="horizon-log" containerID="cri-o://c587dd6664e16dc6c8f9c91ebb51e676a233207617e909016ab35e84007ec300" gracePeriod=30 Nov 21 15:43:47 crc kubenswrapper[4774]: I1121 15:43:47.415800 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-644d85c745-gsdh6" podUID="d0ec0af3-99c8-430d-b20d-f337375960d5" containerName="horizon" containerID="cri-o://dd01c5b637bb7e4ce7eb672c4654f3483df6035ed70e9c46a8081d6b7eb69e8b" gracePeriod=30 Nov 21 15:43:47 crc kubenswrapper[4774]: I1121 15:43:47.439586 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6878d4cff5-g8xtp" podStartSLOduration=3.440246233 podStartE2EDuration="12.439567271s" podCreationTimestamp="2025-11-21 15:43:35 +0000 UTC" firstStartedPulling="2025-11-21 15:43:36.664055561 +0000 UTC m=+6007.316254820" lastFinishedPulling="2025-11-21 15:43:45.663376599 +0000 UTC m=+6016.315575858" observedRunningTime="2025-11-21 15:43:47.420879158 +0000 UTC m=+6018.073078427" watchObservedRunningTime="2025-11-21 15:43:47.439567271 +0000 UTC m=+6018.091766530" Nov 21 15:43:47 crc kubenswrapper[4774]: I1121 15:43:47.447499 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-644d85c745-gsdh6" podStartSLOduration=3.610603849 podStartE2EDuration="12.447462356s" podCreationTimestamp="2025-11-21 15:43:35 +0000 UTC" firstStartedPulling="2025-11-21 15:43:36.899123902 +0000 UTC m=+6007.551323151" lastFinishedPulling="2025-11-21 15:43:45.735982399 +0000 UTC m=+6016.388181658" observedRunningTime="2025-11-21 15:43:47.4427015 +0000 UTC m=+6018.094900759" watchObservedRunningTime="2025-11-21 15:43:47.447462356 +0000 UTC m=+6018.099661615" Nov 21 15:43:48 crc kubenswrapper[4774]: I1121 15:43:48.426965 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"da63df4b-d3aa-48c0-9d11-834c1c7e825b","Type":"ContainerStarted","Data":"3697d9a31a83424de126f7008b58460d049fd48137c5ea24603c774312ddc799"} Nov 21 15:43:48 crc kubenswrapper[4774]: I1121 15:43:48.431291 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-external-api-0" event={"ID":"3ed244fd-40a8-41fe-84dc-b291ec15dd87","Type":"ContainerStarted","Data":"f46174928d42261bc2398f2e96220bb7668ef9116f450317d6235e2f8e24f6f4"} Nov 21 15:43:48 crc kubenswrapper[4774]: I1121 15:43:48.468773 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=8.468746949 podStartE2EDuration="8.468746949s" podCreationTimestamp="2025-11-21 15:43:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:43:48.446275529 +0000 UTC m=+6019.098474788" watchObservedRunningTime="2025-11-21 15:43:48.468746949 +0000 UTC m=+6019.120946218" Nov 21 15:43:48 crc kubenswrapper[4774]: I1121 15:43:48.483393 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.483368556 podStartE2EDuration="8.483368556s" podCreationTimestamp="2025-11-21 15:43:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:43:48.46700414 +0000 UTC m=+6019.119203419" watchObservedRunningTime="2025-11-21 15:43:48.483368556 +0000 UTC m=+6019.135567825" Nov 21 15:43:50 crc kubenswrapper[4774]: I1121 15:43:50.763482 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 21 15:43:50 crc kubenswrapper[4774]: I1121 15:43:50.765336 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Nov 21 15:43:50 crc kubenswrapper[4774]: I1121 15:43:50.801334 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 21 15:43:50 crc kubenswrapper[4774]: I1121 15:43:50.801372 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Nov 21 15:43:50 crc kubenswrapper[4774]: I1121 15:43:50.829124 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 21 15:43:50 crc kubenswrapper[4774]: I1121 15:43:50.838086 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 21 15:43:50 crc kubenswrapper[4774]: I1121 15:43:50.838159 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Nov 21 15:43:50 crc kubenswrapper[4774]: I1121 15:43:50.854617 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Nov 21 15:43:51 crc kubenswrapper[4774]: I1121 15:43:51.094081 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:43:51 crc kubenswrapper[4774]: E1121 15:43:51.094589 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:43:51 crc kubenswrapper[4774]: I1121 15:43:51.461082 4774 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 21 15:43:51 crc kubenswrapper[4774]: I1121 15:43:51.461143 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Nov 21 15:43:51 crc kubenswrapper[4774]: I1121 15:43:51.461157 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 21 15:43:51 crc kubenswrapper[4774]: I1121 15:43:51.461292 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Nov 21 15:43:53 crc kubenswrapper[4774]: I1121 15:43:53.562770 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 21 15:43:53 crc kubenswrapper[4774]: I1121 15:43:53.792273 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 21 15:43:54 crc kubenswrapper[4774]: I1121 15:43:54.595441 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Nov 21 15:43:54 crc kubenswrapper[4774]: I1121 15:43:54.717491 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Nov 21 15:43:56 crc kubenswrapper[4774]: I1121 15:43:56.052476 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6878d4cff5-g8xtp" Nov 21 15:43:56 crc kubenswrapper[4774]: I1121 15:43:56.053684 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6878d4cff5-g8xtp" Nov 21 15:43:56 crc kubenswrapper[4774]: I1121 15:43:56.053871 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6878d4cff5-g8xtp" podUID="5338fd26-b1ac-479e-b193-171b2b3f09bc" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.109:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.109:8080: connect: connection refused" Nov 21 15:43:56 crc kubenswrapper[4774]: I1121 15:43:56.328896 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-644d85c745-gsdh6" Nov 21 15:43:56 crc kubenswrapper[4774]: I1121 15:43:56.858898 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7b68ccc75c-xqkjc" podUID="beddfe34-7178-4f7d-9428-ebd52715e910" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.111:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.111:8080: connect: connection refused" Nov 21 15:44:03 crc kubenswrapper[4774]: I1121 15:44:03.092944 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:44:03 crc kubenswrapper[4774]: E1121 15:44:03.093670 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:44:04 crc kubenswrapper[4774]: I1121 15:44:04.052233 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-wr864"] Nov 21 15:44:04 crc kubenswrapper[4774]: 
I1121 15:44:04.062083 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-wr864"] Nov 21 15:44:04 crc kubenswrapper[4774]: I1121 15:44:04.102944 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ce5745d-f81f-4de1-b663-f7469255c903" path="/var/lib/kubelet/pods/6ce5745d-f81f-4de1-b663-f7469255c903/volumes" Nov 21 15:44:06 crc kubenswrapper[4774]: I1121 15:44:06.052802 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6878d4cff5-g8xtp" podUID="5338fd26-b1ac-479e-b193-171b2b3f09bc" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.109:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.109:8080: connect: connection refused" Nov 21 15:44:08 crc kubenswrapper[4774]: I1121 15:44:08.684244 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7b68ccc75c-xqkjc" Nov 21 15:44:10 crc kubenswrapper[4774]: I1121 15:44:10.407176 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7b68ccc75c-xqkjc" Nov 21 15:44:10 crc kubenswrapper[4774]: I1121 15:44:10.529045 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6878d4cff5-g8xtp"] Nov 21 15:44:10 crc kubenswrapper[4774]: I1121 15:44:10.529581 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6878d4cff5-g8xtp" podUID="5338fd26-b1ac-479e-b193-171b2b3f09bc" containerName="horizon-log" containerID="cri-o://42d6bd695e199bfcdb1884d307a551bc95bddd0e1c9ebbd51a556455c0d20c63" gracePeriod=30 Nov 21 15:44:10 crc kubenswrapper[4774]: I1121 15:44:10.530173 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-6878d4cff5-g8xtp" podUID="5338fd26-b1ac-479e-b193-171b2b3f09bc" containerName="horizon" containerID="cri-o://24262b2bb12a39d7788af90994f29259b7132d7e56ff909ec3e15271f072095c" gracePeriod=30 Nov 21 15:44:11 crc kubenswrapper[4774]: I1121 15:44:11.650863 4774 generic.go:334] "Generic (PLEG): container finished" podID="5338fd26-b1ac-479e-b193-171b2b3f09bc" containerID="24262b2bb12a39d7788af90994f29259b7132d7e56ff909ec3e15271f072095c" exitCode=0 Nov 21 15:44:11 crc kubenswrapper[4774]: I1121 15:44:11.650930 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6878d4cff5-g8xtp" event={"ID":"5338fd26-b1ac-479e-b193-171b2b3f09bc","Type":"ContainerDied","Data":"24262b2bb12a39d7788af90994f29259b7132d7e56ff909ec3e15271f072095c"} Nov 21 15:44:15 crc kubenswrapper[4774]: I1121 15:44:15.094341 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:44:15 crc kubenswrapper[4774]: E1121 15:44:15.095326 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.718729 4774 generic.go:334] "Generic (PLEG): container finished" podID="d0ec0af3-99c8-430d-b20d-f337375960d5" containerID="dd01c5b637bb7e4ce7eb672c4654f3483df6035ed70e9c46a8081d6b7eb69e8b" exitCode=137 Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.719187 4774 generic.go:334] 
"Generic (PLEG): container finished" podID="d0ec0af3-99c8-430d-b20d-f337375960d5" containerID="c587dd6664e16dc6c8f9c91ebb51e676a233207617e909016ab35e84007ec300" exitCode=137 Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.718840 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644d85c745-gsdh6" event={"ID":"d0ec0af3-99c8-430d-b20d-f337375960d5","Type":"ContainerDied","Data":"dd01c5b637bb7e4ce7eb672c4654f3483df6035ed70e9c46a8081d6b7eb69e8b"} Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.719248 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644d85c745-gsdh6" event={"ID":"d0ec0af3-99c8-430d-b20d-f337375960d5","Type":"ContainerDied","Data":"c587dd6664e16dc6c8f9c91ebb51e676a233207617e909016ab35e84007ec300"} Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.910547 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-644d85c745-gsdh6" Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.959237 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0ec0af3-99c8-430d-b20d-f337375960d5-logs\") pod \"d0ec0af3-99c8-430d-b20d-f337375960d5\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.959276 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d0ec0af3-99c8-430d-b20d-f337375960d5-horizon-secret-key\") pod \"d0ec0af3-99c8-430d-b20d-f337375960d5\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.959324 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5xg4\" (UniqueName: \"kubernetes.io/projected/d0ec0af3-99c8-430d-b20d-f337375960d5-kube-api-access-s5xg4\") pod \"d0ec0af3-99c8-430d-b20d-f337375960d5\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.959365 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d0ec0af3-99c8-430d-b20d-f337375960d5-config-data\") pod \"d0ec0af3-99c8-430d-b20d-f337375960d5\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.959994 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0ec0af3-99c8-430d-b20d-f337375960d5-logs" (OuterVolumeSpecName: "logs") pod "d0ec0af3-99c8-430d-b20d-f337375960d5" (UID: "d0ec0af3-99c8-430d-b20d-f337375960d5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.977600 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0ec0af3-99c8-430d-b20d-f337375960d5-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "d0ec0af3-99c8-430d-b20d-f337375960d5" (UID: "d0ec0af3-99c8-430d-b20d-f337375960d5"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.977643 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0ec0af3-99c8-430d-b20d-f337375960d5-kube-api-access-s5xg4" (OuterVolumeSpecName: "kube-api-access-s5xg4") pod "d0ec0af3-99c8-430d-b20d-f337375960d5" (UID: "d0ec0af3-99c8-430d-b20d-f337375960d5"). InnerVolumeSpecName "kube-api-access-s5xg4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:44:17 crc kubenswrapper[4774]: I1121 15:44:17.993858 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0ec0af3-99c8-430d-b20d-f337375960d5-config-data" (OuterVolumeSpecName: "config-data") pod "d0ec0af3-99c8-430d-b20d-f337375960d5" (UID: "d0ec0af3-99c8-430d-b20d-f337375960d5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.060979 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0ec0af3-99c8-430d-b20d-f337375960d5-scripts\") pod \"d0ec0af3-99c8-430d-b20d-f337375960d5\" (UID: \"d0ec0af3-99c8-430d-b20d-f337375960d5\") " Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.061408 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0ec0af3-99c8-430d-b20d-f337375960d5-logs\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.061429 4774 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d0ec0af3-99c8-430d-b20d-f337375960d5-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.061443 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5xg4\" (UniqueName: \"kubernetes.io/projected/d0ec0af3-99c8-430d-b20d-f337375960d5-kube-api-access-s5xg4\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.061453 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d0ec0af3-99c8-430d-b20d-f337375960d5-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.080322 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0ec0af3-99c8-430d-b20d-f337375960d5-scripts" (OuterVolumeSpecName: "scripts") pod "d0ec0af3-99c8-430d-b20d-f337375960d5" (UID: "d0ec0af3-99c8-430d-b20d-f337375960d5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.164044 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d0ec0af3-99c8-430d-b20d-f337375960d5-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.729913 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644d85c745-gsdh6" event={"ID":"d0ec0af3-99c8-430d-b20d-f337375960d5","Type":"ContainerDied","Data":"c91f4ed1a9bafc3ce464b5c0e807f35920467400eb0d8b91e8a07879ab47c036"} Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.729964 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-644d85c745-gsdh6" Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.730879 4774 scope.go:117] "RemoveContainer" containerID="dd01c5b637bb7e4ce7eb672c4654f3483df6035ed70e9c46a8081d6b7eb69e8b" Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.759175 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-644d85c745-gsdh6"] Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.766698 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-644d85c745-gsdh6"] Nov 21 15:44:18 crc kubenswrapper[4774]: I1121 15:44:18.902724 4774 scope.go:117] "RemoveContainer" containerID="c587dd6664e16dc6c8f9c91ebb51e676a233207617e909016ab35e84007ec300" Nov 21 15:44:20 crc kubenswrapper[4774]: I1121 15:44:20.110625 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0ec0af3-99c8-430d-b20d-f337375960d5" path="/var/lib/kubelet/pods/d0ec0af3-99c8-430d-b20d-f337375960d5/volumes" Nov 21 15:44:28 crc kubenswrapper[4774]: I1121 15:44:28.093602 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:44:28 crc kubenswrapper[4774]: E1121 15:44:28.094358 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:44:39 crc kubenswrapper[4774]: I1121 15:44:39.094114 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:44:39 crc kubenswrapper[4774]: E1121 15:44:39.095306 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:44:40 crc kubenswrapper[4774]: I1121 15:44:40.972410 4774 generic.go:334] "Generic (PLEG): container finished" podID="5338fd26-b1ac-479e-b193-171b2b3f09bc" containerID="42d6bd695e199bfcdb1884d307a551bc95bddd0e1c9ebbd51a556455c0d20c63" exitCode=137 Nov 21 15:44:40 crc kubenswrapper[4774]: I1121 15:44:40.972481 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6878d4cff5-g8xtp" event={"ID":"5338fd26-b1ac-479e-b193-171b2b3f09bc","Type":"ContainerDied","Data":"42d6bd695e199bfcdb1884d307a551bc95bddd0e1c9ebbd51a556455c0d20c63"} Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.147199 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6878d4cff5-g8xtp" Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.256488 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5338fd26-b1ac-479e-b193-171b2b3f09bc-logs\") pod \"5338fd26-b1ac-479e-b193-171b2b3f09bc\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.256578 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5338fd26-b1ac-479e-b193-171b2b3f09bc-horizon-secret-key\") pod \"5338fd26-b1ac-479e-b193-171b2b3f09bc\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.256630 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5338fd26-b1ac-479e-b193-171b2b3f09bc-config-data\") pod \"5338fd26-b1ac-479e-b193-171b2b3f09bc\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.256835 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vfsk\" (UniqueName: \"kubernetes.io/projected/5338fd26-b1ac-479e-b193-171b2b3f09bc-kube-api-access-4vfsk\") pod \"5338fd26-b1ac-479e-b193-171b2b3f09bc\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.257197 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5338fd26-b1ac-479e-b193-171b2b3f09bc-logs" (OuterVolumeSpecName: "logs") pod "5338fd26-b1ac-479e-b193-171b2b3f09bc" (UID: "5338fd26-b1ac-479e-b193-171b2b3f09bc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.257454 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5338fd26-b1ac-479e-b193-171b2b3f09bc-scripts\") pod \"5338fd26-b1ac-479e-b193-171b2b3f09bc\" (UID: \"5338fd26-b1ac-479e-b193-171b2b3f09bc\") " Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.258156 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5338fd26-b1ac-479e-b193-171b2b3f09bc-logs\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.263796 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5338fd26-b1ac-479e-b193-171b2b3f09bc-kube-api-access-4vfsk" (OuterVolumeSpecName: "kube-api-access-4vfsk") pod "5338fd26-b1ac-479e-b193-171b2b3f09bc" (UID: "5338fd26-b1ac-479e-b193-171b2b3f09bc"). InnerVolumeSpecName "kube-api-access-4vfsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.264566 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5338fd26-b1ac-479e-b193-171b2b3f09bc-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5338fd26-b1ac-479e-b193-171b2b3f09bc" (UID: "5338fd26-b1ac-479e-b193-171b2b3f09bc"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.284597 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5338fd26-b1ac-479e-b193-171b2b3f09bc-scripts" (OuterVolumeSpecName: "scripts") pod "5338fd26-b1ac-479e-b193-171b2b3f09bc" (UID: "5338fd26-b1ac-479e-b193-171b2b3f09bc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.286497 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5338fd26-b1ac-479e-b193-171b2b3f09bc-config-data" (OuterVolumeSpecName: "config-data") pod "5338fd26-b1ac-479e-b193-171b2b3f09bc" (UID: "5338fd26-b1ac-479e-b193-171b2b3f09bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.359772 4774 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5338fd26-b1ac-479e-b193-171b2b3f09bc-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.359808 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5338fd26-b1ac-479e-b193-171b2b3f09bc-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.359830 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vfsk\" (UniqueName: \"kubernetes.io/projected/5338fd26-b1ac-479e-b193-171b2b3f09bc-kube-api-access-4vfsk\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.359842 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5338fd26-b1ac-479e-b193-171b2b3f09bc-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.987503 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6878d4cff5-g8xtp" event={"ID":"5338fd26-b1ac-479e-b193-171b2b3f09bc","Type":"ContainerDied","Data":"46b5e5ab06e2db632a14eca293571d735f8d7901b45e18e9dfca1085db2185e9"} Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.987905 4774 scope.go:117] "RemoveContainer" containerID="24262b2bb12a39d7788af90994f29259b7132d7e56ff909ec3e15271f072095c" Nov 21 15:44:41 crc kubenswrapper[4774]: I1121 15:44:41.987612 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6878d4cff5-g8xtp" Nov 21 15:44:42 crc kubenswrapper[4774]: I1121 15:44:42.030221 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6878d4cff5-g8xtp"] Nov 21 15:44:42 crc kubenswrapper[4774]: I1121 15:44:42.043676 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6878d4cff5-g8xtp"] Nov 21 15:44:42 crc kubenswrapper[4774]: I1121 15:44:42.109153 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5338fd26-b1ac-479e-b193-171b2b3f09bc" path="/var/lib/kubelet/pods/5338fd26-b1ac-479e-b193-171b2b3f09bc/volumes" Nov 21 15:44:42 crc kubenswrapper[4774]: I1121 15:44:42.186512 4774 scope.go:117] "RemoveContainer" containerID="42d6bd695e199bfcdb1884d307a551bc95bddd0e1c9ebbd51a556455c0d20c63" Nov 21 15:44:44 crc kubenswrapper[4774]: I1121 15:44:44.936838 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-plp5f"] Nov 21 15:44:44 crc kubenswrapper[4774]: E1121 15:44:44.937614 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0ec0af3-99c8-430d-b20d-f337375960d5" containerName="horizon" Nov 21 15:44:44 crc kubenswrapper[4774]: I1121 15:44:44.937632 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0ec0af3-99c8-430d-b20d-f337375960d5" containerName="horizon" Nov 21 15:44:44 crc kubenswrapper[4774]: E1121 15:44:44.937654 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5338fd26-b1ac-479e-b193-171b2b3f09bc" containerName="horizon" Nov 21 15:44:44 crc kubenswrapper[4774]: I1121 15:44:44.937661 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5338fd26-b1ac-479e-b193-171b2b3f09bc" containerName="horizon" Nov 21 15:44:44 crc kubenswrapper[4774]: E1121 15:44:44.937704 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5338fd26-b1ac-479e-b193-171b2b3f09bc" containerName="horizon-log" Nov 21 15:44:44 crc kubenswrapper[4774]: I1121 15:44:44.937711 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5338fd26-b1ac-479e-b193-171b2b3f09bc" containerName="horizon-log" Nov 21 15:44:44 crc kubenswrapper[4774]: E1121 15:44:44.937724 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0ec0af3-99c8-430d-b20d-f337375960d5" containerName="horizon-log" Nov 21 15:44:44 crc kubenswrapper[4774]: I1121 15:44:44.937729 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0ec0af3-99c8-430d-b20d-f337375960d5" containerName="horizon-log" Nov 21 15:44:44 crc kubenswrapper[4774]: I1121 15:44:44.937948 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0ec0af3-99c8-430d-b20d-f337375960d5" containerName="horizon" Nov 21 15:44:44 crc kubenswrapper[4774]: I1121 15:44:44.937963 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="5338fd26-b1ac-479e-b193-171b2b3f09bc" containerName="horizon-log" Nov 21 15:44:44 crc kubenswrapper[4774]: I1121 15:44:44.937972 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="5338fd26-b1ac-479e-b193-171b2b3f09bc" containerName="horizon" Nov 21 15:44:44 crc kubenswrapper[4774]: I1121 15:44:44.937982 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0ec0af3-99c8-430d-b20d-f337375960d5" containerName="horizon-log" Nov 21 15:44:44 crc kubenswrapper[4774]: I1121 15:44:44.939947 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-plp5f" Nov 21 15:44:44 crc kubenswrapper[4774]: I1121 15:44:44.952224 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-plp5f"] Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.044632 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhr8j\" (UniqueName: \"kubernetes.io/projected/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-kube-api-access-hhr8j\") pod \"community-operators-plp5f\" (UID: \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\") " pod="openshift-marketplace/community-operators-plp5f" Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.045323 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-utilities\") pod \"community-operators-plp5f\" (UID: \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\") " pod="openshift-marketplace/community-operators-plp5f" Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.045465 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-catalog-content\") pod \"community-operators-plp5f\" (UID: \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\") " pod="openshift-marketplace/community-operators-plp5f" Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.046911 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-a58a-account-create-76brk"] Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.055728 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-a58a-account-create-76brk"] Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.147796 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhr8j\" (UniqueName: \"kubernetes.io/projected/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-kube-api-access-hhr8j\") pod \"community-operators-plp5f\" (UID: \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\") " pod="openshift-marketplace/community-operators-plp5f" Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.147871 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-utilities\") pod \"community-operators-plp5f\" (UID: \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\") " pod="openshift-marketplace/community-operators-plp5f" Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.147914 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-catalog-content\") pod \"community-operators-plp5f\" (UID: \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\") " pod="openshift-marketplace/community-operators-plp5f" Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.148557 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-utilities\") pod \"community-operators-plp5f\" (UID: \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\") " pod="openshift-marketplace/community-operators-plp5f" Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.148599 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-catalog-content\") pod \"community-operators-plp5f\" (UID: \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\") " pod="openshift-marketplace/community-operators-plp5f" Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.169310 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhr8j\" (UniqueName: \"kubernetes.io/projected/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-kube-api-access-hhr8j\") pod \"community-operators-plp5f\" (UID: \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\") " pod="openshift-marketplace/community-operators-plp5f" Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.265452 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-plp5f" Nov 21 15:44:45 crc kubenswrapper[4774]: I1121 15:44:45.760206 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-plp5f"] Nov 21 15:44:46 crc kubenswrapper[4774]: I1121 15:44:46.030517 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-t74fs"] Nov 21 15:44:46 crc kubenswrapper[4774]: I1121 15:44:46.041556 4774 generic.go:334] "Generic (PLEG): container finished" podID="ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" containerID="03ba38b38d1b68212fc98439f2eae991b80e19a8cfda10cc1cd77f7f0ad89ad3" exitCode=0 Nov 21 15:44:46 crc kubenswrapper[4774]: I1121 15:44:46.041596 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-plp5f" event={"ID":"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219","Type":"ContainerDied","Data":"03ba38b38d1b68212fc98439f2eae991b80e19a8cfda10cc1cd77f7f0ad89ad3"} Nov 21 15:44:46 crc kubenswrapper[4774]: I1121 15:44:46.041622 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-plp5f" event={"ID":"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219","Type":"ContainerStarted","Data":"b67856f5d5aa12119423bbde88caff57219560658d967c0c2f00fb6ded0bb450"} Nov 21 15:44:46 crc kubenswrapper[4774]: I1121 15:44:46.042981 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-t74fs"] Nov 21 15:44:46 crc kubenswrapper[4774]: I1121 15:44:46.043865 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 15:44:46 crc kubenswrapper[4774]: I1121 15:44:46.107617 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f1ec2bf-6208-463d-bf73-d1ec3263cdef" path="/var/lib/kubelet/pods/1f1ec2bf-6208-463d-bf73-d1ec3263cdef/volumes" Nov 21 15:44:46 crc kubenswrapper[4774]: I1121 15:44:46.109339 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53dc7a4a-0286-432c-a2c0-6c9cab003290" path="/var/lib/kubelet/pods/53dc7a4a-0286-432c-a2c0-6c9cab003290/volumes" Nov 21 15:44:46 crc kubenswrapper[4774]: I1121 15:44:46.465513 4774 scope.go:117] "RemoveContainer" containerID="1c4cb192de35ed0ac5a46dfc464a4dc19a2b5104da2bb38fb403db82e0ad7759" Nov 21 15:44:46 crc kubenswrapper[4774]: I1121 15:44:46.492460 4774 scope.go:117] "RemoveContainer" containerID="a5a7334c429102027fa706dde1a6770f13fc69a405d16fafc086400cf06ab22a" Nov 21 15:44:46 crc kubenswrapper[4774]: I1121 15:44:46.536021 4774 scope.go:117] "RemoveContainer" containerID="418635577c42296bc9576b37aa34b3850aa16df360df6e0eed199fb4b4f65a4c" Nov 21 15:44:47 crc kubenswrapper[4774]: I1121 15:44:47.054908 4774 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/community-operators-plp5f" event={"ID":"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219","Type":"ContainerStarted","Data":"7416486bc0a1744d86bca8439430cb993c4390d412611d20bdf997d466e3d6f1"} Nov 21 15:44:48 crc kubenswrapper[4774]: I1121 15:44:48.068950 4774 generic.go:334] "Generic (PLEG): container finished" podID="ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" containerID="7416486bc0a1744d86bca8439430cb993c4390d412611d20bdf997d466e3d6f1" exitCode=0 Nov 21 15:44:48 crc kubenswrapper[4774]: I1121 15:44:48.069022 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-plp5f" event={"ID":"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219","Type":"ContainerDied","Data":"7416486bc0a1744d86bca8439430cb993c4390d412611d20bdf997d466e3d6f1"} Nov 21 15:44:49 crc kubenswrapper[4774]: I1121 15:44:49.079549 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-plp5f" event={"ID":"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219","Type":"ContainerStarted","Data":"150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419"} Nov 21 15:44:49 crc kubenswrapper[4774]: I1121 15:44:49.104300 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-plp5f" podStartSLOduration=2.673184017 podStartE2EDuration="5.104278398s" podCreationTimestamp="2025-11-21 15:44:44 +0000 UTC" firstStartedPulling="2025-11-21 15:44:46.04362923 +0000 UTC m=+6076.695828489" lastFinishedPulling="2025-11-21 15:44:48.474723621 +0000 UTC m=+6079.126922870" observedRunningTime="2025-11-21 15:44:49.101806307 +0000 UTC m=+6079.754005566" watchObservedRunningTime="2025-11-21 15:44:49.104278398 +0000 UTC m=+6079.756477677" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.125356 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-846b7d466c-wl6n8"] Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.127457 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.159120 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-846b7d466c-wl6n8"] Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.209395 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4ac90da4-62d3-4985-83da-d106def413db-horizon-secret-key\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.209476 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4ac90da4-62d3-4985-83da-d106def413db-scripts\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.209530 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ac90da4-62d3-4985-83da-d106def413db-logs\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.209676 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrd8b\" (UniqueName: \"kubernetes.io/projected/4ac90da4-62d3-4985-83da-d106def413db-kube-api-access-hrd8b\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.209837 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ac90da4-62d3-4985-83da-d106def413db-config-data\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.312283 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ac90da4-62d3-4985-83da-d106def413db-config-data\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.312506 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4ac90da4-62d3-4985-83da-d106def413db-horizon-secret-key\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.312567 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4ac90da4-62d3-4985-83da-d106def413db-scripts\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.312609 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/4ac90da4-62d3-4985-83da-d106def413db-logs\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.312779 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrd8b\" (UniqueName: \"kubernetes.io/projected/4ac90da4-62d3-4985-83da-d106def413db-kube-api-access-hrd8b\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.313116 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ac90da4-62d3-4985-83da-d106def413db-logs\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.313456 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4ac90da4-62d3-4985-83da-d106def413db-scripts\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.313836 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4ac90da4-62d3-4985-83da-d106def413db-config-data\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.318305 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/4ac90da4-62d3-4985-83da-d106def413db-horizon-secret-key\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.330337 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrd8b\" (UniqueName: \"kubernetes.io/projected/4ac90da4-62d3-4985-83da-d106def413db-kube-api-access-hrd8b\") pod \"horizon-846b7d466c-wl6n8\" (UID: \"4ac90da4-62d3-4985-83da-d106def413db\") " pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.451400 4774 util.go:30] "No sandbox for pod can be found. 
Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.451400 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-846b7d466c-wl6n8"
Nov 21 15:44:53 crc kubenswrapper[4774]: I1121 15:44:53.932338 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-846b7d466c-wl6n8"]
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.043716 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-b97th"]
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.058842 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-b97th"]
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.099640 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70"
Nov 21 15:44:54 crc kubenswrapper[4774]: E1121 15:44:54.099910 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.105424 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bdd8a09-e5ca-4330-b5d5-cda5f2e46471" path="/var/lib/kubelet/pods/4bdd8a09-e5ca-4330-b5d5-cda5f2e46471/volumes"
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.135216 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-846b7d466c-wl6n8" event={"ID":"4ac90da4-62d3-4985-83da-d106def413db","Type":"ContainerStarted","Data":"3c1d1e2ad778358a32c3a2a978d327e949a06271dcf14b79cb0f3a2f2e71036f"}
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.135295 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-846b7d466c-wl6n8" event={"ID":"4ac90da4-62d3-4985-83da-d106def413db","Type":"ContainerStarted","Data":"3d488562873d6f28da5cab4df34b9aeba23d3fea2e40f7cc2b564aff7e0ca564"}
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.759601 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-create-5v749"]
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.761466 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-5v749"
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.776065 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-5v749"]
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.839674 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1cec4856-7315-4613-a792-92590e41b9ee-operator-scripts\") pod \"heat-db-create-5v749\" (UID: \"1cec4856-7315-4613-a792-92590e41b9ee\") " pod="openstack/heat-db-create-5v749"
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.839782 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85w5h\" (UniqueName: \"kubernetes.io/projected/1cec4856-7315-4613-a792-92590e41b9ee-kube-api-access-85w5h\") pod \"heat-db-create-5v749\" (UID: \"1cec4856-7315-4613-a792-92590e41b9ee\") " pod="openstack/heat-db-create-5v749"
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.861104 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-a2aa-account-create-rplvw"]
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.862420 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-a2aa-account-create-rplvw"
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.865007 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-db-secret"
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.872171 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-a2aa-account-create-rplvw"]
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.941264 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm2dl\" (UniqueName: \"kubernetes.io/projected/42c4bad9-a76d-46e9-8933-ab0a80018ae8-kube-api-access-rm2dl\") pod \"heat-a2aa-account-create-rplvw\" (UID: \"42c4bad9-a76d-46e9-8933-ab0a80018ae8\") " pod="openstack/heat-a2aa-account-create-rplvw"
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.941357 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42c4bad9-a76d-46e9-8933-ab0a80018ae8-operator-scripts\") pod \"heat-a2aa-account-create-rplvw\" (UID: \"42c4bad9-a76d-46e9-8933-ab0a80018ae8\") " pod="openstack/heat-a2aa-account-create-rplvw"
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.941390 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1cec4856-7315-4613-a792-92590e41b9ee-operator-scripts\") pod \"heat-db-create-5v749\" (UID: \"1cec4856-7315-4613-a792-92590e41b9ee\") " pod="openstack/heat-db-create-5v749"
Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.941533 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85w5h\" (UniqueName: \"kubernetes.io/projected/1cec4856-7315-4613-a792-92590e41b9ee-kube-api-access-85w5h\") pod \"heat-db-create-5v749\" (UID: \"1cec4856-7315-4613-a792-92590e41b9ee\") " pod="openstack/heat-db-create-5v749"
pod \"heat-db-create-5v749\" (UID: \"1cec4856-7315-4613-a792-92590e41b9ee\") " pod="openstack/heat-db-create-5v749" Nov 21 15:44:54 crc kubenswrapper[4774]: I1121 15:44:54.966581 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85w5h\" (UniqueName: \"kubernetes.io/projected/1cec4856-7315-4613-a792-92590e41b9ee-kube-api-access-85w5h\") pod \"heat-db-create-5v749\" (UID: \"1cec4856-7315-4613-a792-92590e41b9ee\") " pod="openstack/heat-db-create-5v749" Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.043433 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42c4bad9-a76d-46e9-8933-ab0a80018ae8-operator-scripts\") pod \"heat-a2aa-account-create-rplvw\" (UID: \"42c4bad9-a76d-46e9-8933-ab0a80018ae8\") " pod="openstack/heat-a2aa-account-create-rplvw" Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.043603 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm2dl\" (UniqueName: \"kubernetes.io/projected/42c4bad9-a76d-46e9-8933-ab0a80018ae8-kube-api-access-rm2dl\") pod \"heat-a2aa-account-create-rplvw\" (UID: \"42c4bad9-a76d-46e9-8933-ab0a80018ae8\") " pod="openstack/heat-a2aa-account-create-rplvw" Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.044373 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42c4bad9-a76d-46e9-8933-ab0a80018ae8-operator-scripts\") pod \"heat-a2aa-account-create-rplvw\" (UID: \"42c4bad9-a76d-46e9-8933-ab0a80018ae8\") " pod="openstack/heat-a2aa-account-create-rplvw" Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.086866 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-5v749" Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.090497 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm2dl\" (UniqueName: \"kubernetes.io/projected/42c4bad9-a76d-46e9-8933-ab0a80018ae8-kube-api-access-rm2dl\") pod \"heat-a2aa-account-create-rplvw\" (UID: \"42c4bad9-a76d-46e9-8933-ab0a80018ae8\") " pod="openstack/heat-a2aa-account-create-rplvw" Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.151784 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-846b7d466c-wl6n8" event={"ID":"4ac90da4-62d3-4985-83da-d106def413db","Type":"ContainerStarted","Data":"7e46764cd94dec8ad96ba9240fa3b23a7c5ad4322673360caf7485426f4f0339"} Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.182381 4774 util.go:30] "No sandbox for pod can be found. 
Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.182381 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-a2aa-account-create-rplvw"
Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.210772 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-846b7d466c-wl6n8" podStartSLOduration=2.210753081 podStartE2EDuration="2.210753081s" podCreationTimestamp="2025-11-21 15:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:44:55.192511391 +0000 UTC m=+6085.844710660" watchObservedRunningTime="2025-11-21 15:44:55.210753081 +0000 UTC m=+6085.862952340"
Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.270070 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-plp5f"
Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.281916 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-plp5f"
Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.352121 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-plp5f"
Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.660460 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-create-5v749"]
Nov 21 15:44:55 crc kubenswrapper[4774]: W1121 15:44:55.802680 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42c4bad9_a76d_46e9_8933_ab0a80018ae8.slice/crio-71985a9f0819cefe425f1f3fa6cc7640af37eb87bce98d8e41852e6da2a29de4 WatchSource:0}: Error finding container 71985a9f0819cefe425f1f3fa6cc7640af37eb87bce98d8e41852e6da2a29de4: Status 404 returned error can't find the container with id 71985a9f0819cefe425f1f3fa6cc7640af37eb87bce98d8e41852e6da2a29de4
Nov 21 15:44:55 crc kubenswrapper[4774]: I1121 15:44:55.806559 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-a2aa-account-create-rplvw"]
Nov 21 15:44:56 crc kubenswrapper[4774]: I1121 15:44:56.178981 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-a2aa-account-create-rplvw" event={"ID":"42c4bad9-a76d-46e9-8933-ab0a80018ae8","Type":"ContainerStarted","Data":"85ebe1cd0516ff156cb98bd329ac1185deaf3319793a2c42cae450bb4bb3cc38"}
Nov 21 15:44:56 crc kubenswrapper[4774]: I1121 15:44:56.179313 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-a2aa-account-create-rplvw" event={"ID":"42c4bad9-a76d-46e9-8933-ab0a80018ae8","Type":"ContainerStarted","Data":"71985a9f0819cefe425f1f3fa6cc7640af37eb87bce98d8e41852e6da2a29de4"}
Nov 21 15:44:56 crc kubenswrapper[4774]: I1121 15:44:56.184757 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-5v749" event={"ID":"1cec4856-7315-4613-a792-92590e41b9ee","Type":"ContainerStarted","Data":"3d71a078c88580f3ab785fc4e1313f543222412c9d7ca4a0e51a38730d63d2ee"}
Nov 21 15:44:56 crc kubenswrapper[4774]: I1121 15:44:56.184838 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-5v749" event={"ID":"1cec4856-7315-4613-a792-92590e41b9ee","Type":"ContainerStarted","Data":"fac838872edf88faf1199a36018fcd40fea687e5a18973f661536a6fad81b876"}
Nov 21 15:44:56 crc kubenswrapper[4774]: I1121 15:44:56.199647 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-a2aa-account-create-rplvw" podStartSLOduration=2.199618301 podStartE2EDuration="2.199618301s" podCreationTimestamp="2025-11-21 15:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:44:56.19503696 +0000 UTC m=+6086.847236219" watchObservedRunningTime="2025-11-21 15:44:56.199618301 +0000 UTC m=+6086.851817560"
Nov 21 15:44:56 crc kubenswrapper[4774]: I1121 15:44:56.230656 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-create-5v749" podStartSLOduration=2.230610354 podStartE2EDuration="2.230610354s" podCreationTimestamp="2025-11-21 15:44:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:44:56.220179517 +0000 UTC m=+6086.872378776" watchObservedRunningTime="2025-11-21 15:44:56.230610354 +0000 UTC m=+6086.882809613"
Nov 21 15:44:56 crc kubenswrapper[4774]: I1121 15:44:56.254304 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-plp5f"
Nov 21 15:44:56 crc kubenswrapper[4774]: I1121 15:44:56.301363 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-plp5f"]
Nov 21 15:44:57 crc kubenswrapper[4774]: I1121 15:44:57.194557 4774 generic.go:334] "Generic (PLEG): container finished" podID="42c4bad9-a76d-46e9-8933-ab0a80018ae8" containerID="85ebe1cd0516ff156cb98bd329ac1185deaf3319793a2c42cae450bb4bb3cc38" exitCode=0
Nov 21 15:44:57 crc kubenswrapper[4774]: I1121 15:44:57.194599 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-a2aa-account-create-rplvw" event={"ID":"42c4bad9-a76d-46e9-8933-ab0a80018ae8","Type":"ContainerDied","Data":"85ebe1cd0516ff156cb98bd329ac1185deaf3319793a2c42cae450bb4bb3cc38"}
Nov 21 15:44:57 crc kubenswrapper[4774]: I1121 15:44:57.196702 4774 generic.go:334] "Generic (PLEG): container finished" podID="1cec4856-7315-4613-a792-92590e41b9ee" containerID="3d71a078c88580f3ab785fc4e1313f543222412c9d7ca4a0e51a38730d63d2ee" exitCode=0
Nov 21 15:44:57 crc kubenswrapper[4774]: I1121 15:44:57.196743 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-5v749" event={"ID":"1cec4856-7315-4613-a792-92590e41b9ee","Type":"ContainerDied","Data":"3d71a078c88580f3ab785fc4e1313f543222412c9d7ca4a0e51a38730d63d2ee"}
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.209205 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-plp5f" podUID="ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" containerName="registry-server" containerID="cri-o://150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419" gracePeriod=2
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.558736 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-a2aa-account-create-rplvw"
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.634422 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rm2dl\" (UniqueName: \"kubernetes.io/projected/42c4bad9-a76d-46e9-8933-ab0a80018ae8-kube-api-access-rm2dl\") pod \"42c4bad9-a76d-46e9-8933-ab0a80018ae8\" (UID: \"42c4bad9-a76d-46e9-8933-ab0a80018ae8\") "
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.634766 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42c4bad9-a76d-46e9-8933-ab0a80018ae8-operator-scripts\") pod \"42c4bad9-a76d-46e9-8933-ab0a80018ae8\" (UID: \"42c4bad9-a76d-46e9-8933-ab0a80018ae8\") "
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.642718 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42c4bad9-a76d-46e9-8933-ab0a80018ae8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "42c4bad9-a76d-46e9-8933-ab0a80018ae8" (UID: "42c4bad9-a76d-46e9-8933-ab0a80018ae8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.645414 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42c4bad9-a76d-46e9-8933-ab0a80018ae8-kube-api-access-rm2dl" (OuterVolumeSpecName: "kube-api-access-rm2dl") pod "42c4bad9-a76d-46e9-8933-ab0a80018ae8" (UID: "42c4bad9-a76d-46e9-8933-ab0a80018ae8"). InnerVolumeSpecName "kube-api-access-rm2dl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.699944 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-5v749"
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.710735 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-plp5f"
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.737391 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42c4bad9-a76d-46e9-8933-ab0a80018ae8-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.737430 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rm2dl\" (UniqueName: \"kubernetes.io/projected/42c4bad9-a76d-46e9-8933-ab0a80018ae8-kube-api-access-rm2dl\") on node \"crc\" DevicePath \"\""
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.839021 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1cec4856-7315-4613-a792-92590e41b9ee-operator-scripts\") pod \"1cec4856-7315-4613-a792-92590e41b9ee\" (UID: \"1cec4856-7315-4613-a792-92590e41b9ee\") "
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.839110 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85w5h\" (UniqueName: \"kubernetes.io/projected/1cec4856-7315-4613-a792-92590e41b9ee-kube-api-access-85w5h\") pod \"1cec4856-7315-4613-a792-92590e41b9ee\" (UID: \"1cec4856-7315-4613-a792-92590e41b9ee\") "
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.839181 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-catalog-content\") pod \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\" (UID: \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\") "
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.839309 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-utilities\") pod \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\" (UID: \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\") "
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.839385 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhr8j\" (UniqueName: \"kubernetes.io/projected/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-kube-api-access-hhr8j\") pod \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\" (UID: \"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219\") "
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.839517 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1cec4856-7315-4613-a792-92590e41b9ee-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1cec4856-7315-4613-a792-92590e41b9ee" (UID: "1cec4856-7315-4613-a792-92590e41b9ee"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.839891 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1cec4856-7315-4613-a792-92590e41b9ee-operator-scripts\") on node \"crc\" DevicePath \"\""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.842776 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-kube-api-access-hhr8j" (OuterVolumeSpecName: "kube-api-access-hhr8j") pod "ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" (UID: "ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219"). InnerVolumeSpecName "kube-api-access-hhr8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.843289 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cec4856-7315-4613-a792-92590e41b9ee-kube-api-access-85w5h" (OuterVolumeSpecName: "kube-api-access-85w5h") pod "1cec4856-7315-4613-a792-92590e41b9ee" (UID: "1cec4856-7315-4613-a792-92590e41b9ee"). InnerVolumeSpecName "kube-api-access-85w5h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.883277 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" (UID: "ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.942272 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.942354 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhr8j\" (UniqueName: \"kubernetes.io/projected/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-kube-api-access-hhr8j\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.942370 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85w5h\" (UniqueName: \"kubernetes.io/projected/1cec4856-7315-4613-a792-92590e41b9ee-kube-api-access-85w5h\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:58 crc kubenswrapper[4774]: I1121 15:44:58.942382 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.221752 4774 util.go:48] "No ready sandbox for pod can be found. 
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.221752 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-create-5v749"
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.221748 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-create-5v749" event={"ID":"1cec4856-7315-4613-a792-92590e41b9ee","Type":"ContainerDied","Data":"fac838872edf88faf1199a36018fcd40fea687e5a18973f661536a6fad81b876"}
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.221858 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fac838872edf88faf1199a36018fcd40fea687e5a18973f661536a6fad81b876"
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.226639 4774 generic.go:334] "Generic (PLEG): container finished" podID="ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" containerID="150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419" exitCode=0
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.226728 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-plp5f"
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.226733 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-plp5f" event={"ID":"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219","Type":"ContainerDied","Data":"150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419"}
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.226779 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-plp5f" event={"ID":"ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219","Type":"ContainerDied","Data":"b67856f5d5aa12119423bbde88caff57219560658d967c0c2f00fb6ded0bb450"}
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.226806 4774 scope.go:117] "RemoveContainer" containerID="150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419"
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.234024 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-a2aa-account-create-rplvw" event={"ID":"42c4bad9-a76d-46e9-8933-ab0a80018ae8","Type":"ContainerDied","Data":"71985a9f0819cefe425f1f3fa6cc7640af37eb87bce98d8e41852e6da2a29de4"}
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.234076 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71985a9f0819cefe425f1f3fa6cc7640af37eb87bce98d8e41852e6da2a29de4"
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.234109 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-a2aa-account-create-rplvw"
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.266085 4774 scope.go:117] "RemoveContainer" containerID="7416486bc0a1744d86bca8439430cb993c4390d412611d20bdf997d466e3d6f1"
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.285078 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-plp5f"]
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.293635 4774 scope.go:117] "RemoveContainer" containerID="03ba38b38d1b68212fc98439f2eae991b80e19a8cfda10cc1cd77f7f0ad89ad3"
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.295342 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-plp5f"]
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.311991 4774 scope.go:117] "RemoveContainer" containerID="150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419"
Nov 21 15:44:59 crc kubenswrapper[4774]: E1121 15:44:59.312424 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419\": container with ID starting with 150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419 not found: ID does not exist" containerID="150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419"
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.312470 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419"} err="failed to get container status \"150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419\": rpc error: code = NotFound desc = could not find container \"150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419\": container with ID starting with 150cf9c393b0863a3b92792ab0e85b3470fb7fdc31688d4d46e3a840f1c59419 not found: ID does not exist"
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.312503 4774 scope.go:117] "RemoveContainer" containerID="7416486bc0a1744d86bca8439430cb993c4390d412611d20bdf997d466e3d6f1"
Nov 21 15:44:59 crc kubenswrapper[4774]: E1121 15:44:59.312932 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7416486bc0a1744d86bca8439430cb993c4390d412611d20bdf997d466e3d6f1\": container with ID starting with 7416486bc0a1744d86bca8439430cb993c4390d412611d20bdf997d466e3d6f1 not found: ID does not exist" containerID="7416486bc0a1744d86bca8439430cb993c4390d412611d20bdf997d466e3d6f1"
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.312996 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7416486bc0a1744d86bca8439430cb993c4390d412611d20bdf997d466e3d6f1"} err="failed to get container status \"7416486bc0a1744d86bca8439430cb993c4390d412611d20bdf997d466e3d6f1\": rpc error: code = NotFound desc = could not find container \"7416486bc0a1744d86bca8439430cb993c4390d412611d20bdf997d466e3d6f1\": container with ID starting with 7416486bc0a1744d86bca8439430cb993c4390d412611d20bdf997d466e3d6f1 not found: ID does not exist"
Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.313025 4774 scope.go:117] "RemoveContainer" containerID="03ba38b38d1b68212fc98439f2eae991b80e19a8cfda10cc1cd77f7f0ad89ad3"
err="rpc error: code = NotFound desc = could not find container \"03ba38b38d1b68212fc98439f2eae991b80e19a8cfda10cc1cd77f7f0ad89ad3\": container with ID starting with 03ba38b38d1b68212fc98439f2eae991b80e19a8cfda10cc1cd77f7f0ad89ad3 not found: ID does not exist" containerID="03ba38b38d1b68212fc98439f2eae991b80e19a8cfda10cc1cd77f7f0ad89ad3" Nov 21 15:44:59 crc kubenswrapper[4774]: I1121 15:44:59.313279 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03ba38b38d1b68212fc98439f2eae991b80e19a8cfda10cc1cd77f7f0ad89ad3"} err="failed to get container status \"03ba38b38d1b68212fc98439f2eae991b80e19a8cfda10cc1cd77f7f0ad89ad3\": rpc error: code = NotFound desc = could not find container \"03ba38b38d1b68212fc98439f2eae991b80e19a8cfda10cc1cd77f7f0ad89ad3\": container with ID starting with 03ba38b38d1b68212fc98439f2eae991b80e19a8cfda10cc1cd77f7f0ad89ad3 not found: ID does not exist" Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.074624 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-db-sync-2z6xm"] Nov 21 15:45:00 crc kubenswrapper[4774]: E1121 15:45:00.075462 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" containerName="registry-server" Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.075490 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" containerName="registry-server" Nov 21 15:45:00 crc kubenswrapper[4774]: E1121 15:45:00.075518 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cec4856-7315-4613-a792-92590e41b9ee" containerName="mariadb-database-create" Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.075529 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cec4856-7315-4613-a792-92590e41b9ee" containerName="mariadb-database-create" Nov 21 15:45:00 crc kubenswrapper[4774]: E1121 15:45:00.075547 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42c4bad9-a76d-46e9-8933-ab0a80018ae8" containerName="mariadb-account-create" Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.075556 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="42c4bad9-a76d-46e9-8933-ab0a80018ae8" containerName="mariadb-account-create" Nov 21 15:45:00 crc kubenswrapper[4774]: E1121 15:45:00.075578 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" containerName="extract-utilities" Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.075586 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" containerName="extract-utilities" Nov 21 15:45:00 crc kubenswrapper[4774]: E1121 15:45:00.075602 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" containerName="extract-content" Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.075609 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" containerName="extract-content" Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.077945 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" containerName="registry-server" Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.077980 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="42c4bad9-a76d-46e9-8933-ab0a80018ae8" containerName="mariadb-account-create" Nov 21 15:45:00 crc 
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.078006 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cec4856-7315-4613-a792-92590e41b9ee" containerName="mariadb-database-create"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.078981 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-2z6xm"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.087547 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.092783 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-85pjg"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.109622 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219" path="/var/lib/kubelet/pods/ea571d99-6dfe-4bb9-b4d4-ea3b2c0d9219/volumes"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.110338 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-2z6xm"]
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.169061 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/103b1e47-f61b-47ee-8de4-a6a4f83cc316-config-data\") pod \"heat-db-sync-2z6xm\" (UID: \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\") " pod="openstack/heat-db-sync-2z6xm"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.169212 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/103b1e47-f61b-47ee-8de4-a6a4f83cc316-combined-ca-bundle\") pod \"heat-db-sync-2z6xm\" (UID: \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\") " pod="openstack/heat-db-sync-2z6xm"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.169317 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sznc2\" (UniqueName: \"kubernetes.io/projected/103b1e47-f61b-47ee-8de4-a6a4f83cc316-kube-api-access-sznc2\") pod \"heat-db-sync-2z6xm\" (UID: \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\") " pod="openstack/heat-db-sync-2z6xm"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.214890 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"]
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.216745 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.221168 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.236073 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.244300 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"]
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.281161 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f72fcfa5-b81f-448f-8aa5-0134d627c92b-secret-volume\") pod \"collect-profiles-29395665-6c56p\" (UID: \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.281218 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/103b1e47-f61b-47ee-8de4-a6a4f83cc316-combined-ca-bundle\") pod \"heat-db-sync-2z6xm\" (UID: \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\") " pod="openstack/heat-db-sync-2z6xm"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.281431 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sznc2\" (UniqueName: \"kubernetes.io/projected/103b1e47-f61b-47ee-8de4-a6a4f83cc316-kube-api-access-sznc2\") pod \"heat-db-sync-2z6xm\" (UID: \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\") " pod="openstack/heat-db-sync-2z6xm"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.281567 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrp6l\" (UniqueName: \"kubernetes.io/projected/f72fcfa5-b81f-448f-8aa5-0134d627c92b-kube-api-access-qrp6l\") pod \"collect-profiles-29395665-6c56p\" (UID: \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.281677 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/103b1e47-f61b-47ee-8de4-a6a4f83cc316-config-data\") pod \"heat-db-sync-2z6xm\" (UID: \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\") " pod="openstack/heat-db-sync-2z6xm"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.281781 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f72fcfa5-b81f-448f-8aa5-0134d627c92b-config-volume\") pod \"collect-profiles-29395665-6c56p\" (UID: \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.302071 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/103b1e47-f61b-47ee-8de4-a6a4f83cc316-config-data\") pod \"heat-db-sync-2z6xm\" (UID: \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\") " pod="openstack/heat-db-sync-2z6xm"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.314604 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/103b1e47-f61b-47ee-8de4-a6a4f83cc316-combined-ca-bundle\") pod \"heat-db-sync-2z6xm\" (UID: \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\") " pod="openstack/heat-db-sync-2z6xm"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.327375 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sznc2\" (UniqueName: \"kubernetes.io/projected/103b1e47-f61b-47ee-8de4-a6a4f83cc316-kube-api-access-sznc2\") pod \"heat-db-sync-2z6xm\" (UID: \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\") " pod="openstack/heat-db-sync-2z6xm"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.383478 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrp6l\" (UniqueName: \"kubernetes.io/projected/f72fcfa5-b81f-448f-8aa5-0134d627c92b-kube-api-access-qrp6l\") pod \"collect-profiles-29395665-6c56p\" (UID: \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.383593 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f72fcfa5-b81f-448f-8aa5-0134d627c92b-config-volume\") pod \"collect-profiles-29395665-6c56p\" (UID: \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.383624 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f72fcfa5-b81f-448f-8aa5-0134d627c92b-secret-volume\") pod \"collect-profiles-29395665-6c56p\" (UID: \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.384814 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f72fcfa5-b81f-448f-8aa5-0134d627c92b-config-volume\") pod \"collect-profiles-29395665-6c56p\" (UID: \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.391401 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f72fcfa5-b81f-448f-8aa5-0134d627c92b-secret-volume\") pod \"collect-profiles-29395665-6c56p\" (UID: \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.398219 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-2z6xm"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.402620 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrp6l\" (UniqueName: \"kubernetes.io/projected/f72fcfa5-b81f-448f-8aa5-0134d627c92b-kube-api-access-qrp6l\") pod \"collect-profiles-29395665-6c56p\" (UID: \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.565594 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"
Nov 21 15:45:00 crc kubenswrapper[4774]: I1121 15:45:00.880116 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-db-sync-2z6xm"]
Nov 21 15:45:00 crc kubenswrapper[4774]: W1121 15:45:00.884073 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod103b1e47_f61b_47ee_8de4_a6a4f83cc316.slice/crio-6e73a3deafce05cc3338003d2a7cec2695b837c886c6be48310585a0afb41018 WatchSource:0}: Error finding container 6e73a3deafce05cc3338003d2a7cec2695b837c886c6be48310585a0afb41018: Status 404 returned error can't find the container with id 6e73a3deafce05cc3338003d2a7cec2695b837c886c6be48310585a0afb41018
Nov 21 15:45:01 crc kubenswrapper[4774]: I1121 15:45:01.077559 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"]
Nov 21 15:45:01 crc kubenswrapper[4774]: W1121 15:45:01.082569 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf72fcfa5_b81f_448f_8aa5_0134d627c92b.slice/crio-4e002514dc12153074804af86fb0fad3f5f4e02c7eb4fdc4cdd66ce70ddbbfea WatchSource:0}: Error finding container 4e002514dc12153074804af86fb0fad3f5f4e02c7eb4fdc4cdd66ce70ddbbfea: Status 404 returned error can't find the container with id 4e002514dc12153074804af86fb0fad3f5f4e02c7eb4fdc4cdd66ce70ddbbfea
Nov 21 15:45:01 crc kubenswrapper[4774]: I1121 15:45:01.268957 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-2z6xm" event={"ID":"103b1e47-f61b-47ee-8de4-a6a4f83cc316","Type":"ContainerStarted","Data":"6e73a3deafce05cc3338003d2a7cec2695b837c886c6be48310585a0afb41018"}
Nov 21 15:45:01 crc kubenswrapper[4774]: I1121 15:45:01.270050 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p" event={"ID":"f72fcfa5-b81f-448f-8aa5-0134d627c92b","Type":"ContainerStarted","Data":"4e002514dc12153074804af86fb0fad3f5f4e02c7eb4fdc4cdd66ce70ddbbfea"}
Nov 21 15:45:02 crc kubenswrapper[4774]: I1121 15:45:02.279420 4774 generic.go:334] "Generic (PLEG): container finished" podID="f72fcfa5-b81f-448f-8aa5-0134d627c92b" containerID="8f765fb1a65ddb9f4235e669113327c96ddc6eb87e97f4bbc795be02a5234508" exitCode=0
Nov 21 15:45:02 crc kubenswrapper[4774]: I1121 15:45:02.279710 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p" event={"ID":"f72fcfa5-b81f-448f-8aa5-0134d627c92b","Type":"ContainerDied","Data":"8f765fb1a65ddb9f4235e669113327c96ddc6eb87e97f4bbc795be02a5234508"}
Nov 21 15:45:03 crc kubenswrapper[4774]: I1121 15:45:03.452084 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-846b7d466c-wl6n8"
Nov 21 15:45:03 crc kubenswrapper[4774]: I1121 15:45:03.452455 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-846b7d466c-wl6n8"
Nov 21 15:45:03 crc kubenswrapper[4774]: I1121 15:45:03.676754 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"
Nov 21 15:45:03 crc kubenswrapper[4774]: I1121 15:45:03.851112 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f72fcfa5-b81f-448f-8aa5-0134d627c92b-secret-volume\") pod \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\" (UID: \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\") "
Nov 21 15:45:03 crc kubenswrapper[4774]: I1121 15:45:03.851360 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f72fcfa5-b81f-448f-8aa5-0134d627c92b-config-volume\") pod \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\" (UID: \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\") "
Nov 21 15:45:03 crc kubenswrapper[4774]: I1121 15:45:03.851409 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrp6l\" (UniqueName: \"kubernetes.io/projected/f72fcfa5-b81f-448f-8aa5-0134d627c92b-kube-api-access-qrp6l\") pod \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\" (UID: \"f72fcfa5-b81f-448f-8aa5-0134d627c92b\") "
Nov 21 15:45:03 crc kubenswrapper[4774]: I1121 15:45:03.854225 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f72fcfa5-b81f-448f-8aa5-0134d627c92b-config-volume" (OuterVolumeSpecName: "config-volume") pod "f72fcfa5-b81f-448f-8aa5-0134d627c92b" (UID: "f72fcfa5-b81f-448f-8aa5-0134d627c92b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:45:03 crc kubenswrapper[4774]: I1121 15:45:03.859089 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f72fcfa5-b81f-448f-8aa5-0134d627c92b-kube-api-access-qrp6l" (OuterVolumeSpecName: "kube-api-access-qrp6l") pod "f72fcfa5-b81f-448f-8aa5-0134d627c92b" (UID: "f72fcfa5-b81f-448f-8aa5-0134d627c92b"). InnerVolumeSpecName "kube-api-access-qrp6l". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:45:03 crc kubenswrapper[4774]: I1121 15:45:03.954242 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f72fcfa5-b81f-448f-8aa5-0134d627c92b-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:03 crc kubenswrapper[4774]: I1121 15:45:03.954283 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrp6l\" (UniqueName: \"kubernetes.io/projected/f72fcfa5-b81f-448f-8aa5-0134d627c92b-kube-api-access-qrp6l\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:03 crc kubenswrapper[4774]: I1121 15:45:03.954297 4774 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f72fcfa5-b81f-448f-8aa5-0134d627c92b-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:04 crc kubenswrapper[4774]: I1121 15:45:04.297515 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p" event={"ID":"f72fcfa5-b81f-448f-8aa5-0134d627c92b","Type":"ContainerDied","Data":"4e002514dc12153074804af86fb0fad3f5f4e02c7eb4fdc4cdd66ce70ddbbfea"} Nov 21 15:45:04 crc kubenswrapper[4774]: I1121 15:45:04.297734 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e002514dc12153074804af86fb0fad3f5f4e02c7eb4fdc4cdd66ce70ddbbfea" Nov 21 15:45:04 crc kubenswrapper[4774]: I1121 15:45:04.297841 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p" Nov 21 15:45:04 crc kubenswrapper[4774]: I1121 15:45:04.753349 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s"] Nov 21 15:45:04 crc kubenswrapper[4774]: I1121 15:45:04.765099 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395620-dzl5s"] Nov 21 15:45:06 crc kubenswrapper[4774]: I1121 15:45:06.105248 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97443f21-9db6-47a2-b6af-d508a06c69a2" path="/var/lib/kubelet/pods/97443f21-9db6-47a2-b6af-d508a06c69a2/volumes" Nov 21 15:45:08 crc kubenswrapper[4774]: I1121 15:45:08.350408 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-2z6xm" event={"ID":"103b1e47-f61b-47ee-8de4-a6a4f83cc316","Type":"ContainerStarted","Data":"4284d2cd568ab8bbda01fbbbfd23eeffbcacd6fe39275849d8f13444a8058555"} Nov 21 15:45:08 crc kubenswrapper[4774]: I1121 15:45:08.372775 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-db-sync-2z6xm" podStartSLOduration=1.188491635 podStartE2EDuration="8.372754942s" podCreationTimestamp="2025-11-21 15:45:00 +0000 UTC" firstStartedPulling="2025-11-21 15:45:00.886593449 +0000 UTC m=+6091.538792718" lastFinishedPulling="2025-11-21 15:45:08.070856766 +0000 UTC m=+6098.723056025" observedRunningTime="2025-11-21 15:45:08.366739691 +0000 UTC m=+6099.018938970" watchObservedRunningTime="2025-11-21 15:45:08.372754942 +0000 UTC m=+6099.024954201" Nov 21 15:45:09 crc kubenswrapper[4774]: I1121 15:45:09.093402 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:45:09 crc kubenswrapper[4774]: E1121 15:45:09.094062 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:45:11 crc kubenswrapper[4774]: I1121 15:45:11.379921 4774 generic.go:334] "Generic (PLEG): container finished" podID="103b1e47-f61b-47ee-8de4-a6a4f83cc316" containerID="4284d2cd568ab8bbda01fbbbfd23eeffbcacd6fe39275849d8f13444a8058555" exitCode=0 Nov 21 15:45:11 crc kubenswrapper[4774]: I1121 15:45:11.380039 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-2z6xm" event={"ID":"103b1e47-f61b-47ee-8de4-a6a4f83cc316","Type":"ContainerDied","Data":"4284d2cd568ab8bbda01fbbbfd23eeffbcacd6fe39275849d8f13444a8058555"} Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:12.770470 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-2z6xm" Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:12.843568 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/103b1e47-f61b-47ee-8de4-a6a4f83cc316-config-data\") pod \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\" (UID: \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\") " Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:12.843758 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sznc2\" (UniqueName: \"kubernetes.io/projected/103b1e47-f61b-47ee-8de4-a6a4f83cc316-kube-api-access-sznc2\") pod \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\" (UID: \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\") " Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:12.843790 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/103b1e47-f61b-47ee-8de4-a6a4f83cc316-combined-ca-bundle\") pod \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\" (UID: \"103b1e47-f61b-47ee-8de4-a6a4f83cc316\") " Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:12.850583 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/103b1e47-f61b-47ee-8de4-a6a4f83cc316-kube-api-access-sznc2" (OuterVolumeSpecName: "kube-api-access-sznc2") pod "103b1e47-f61b-47ee-8de4-a6a4f83cc316" (UID: "103b1e47-f61b-47ee-8de4-a6a4f83cc316"). InnerVolumeSpecName "kube-api-access-sznc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:12.874792 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/103b1e47-f61b-47ee-8de4-a6a4f83cc316-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "103b1e47-f61b-47ee-8de4-a6a4f83cc316" (UID: "103b1e47-f61b-47ee-8de4-a6a4f83cc316"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:12.927242 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/103b1e47-f61b-47ee-8de4-a6a4f83cc316-config-data" (OuterVolumeSpecName: "config-data") pod "103b1e47-f61b-47ee-8de4-a6a4f83cc316" (UID: "103b1e47-f61b-47ee-8de4-a6a4f83cc316"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:12.946037 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sznc2\" (UniqueName: \"kubernetes.io/projected/103b1e47-f61b-47ee-8de4-a6a4f83cc316-kube-api-access-sznc2\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:12.946077 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/103b1e47-f61b-47ee-8de4-a6a4f83cc316-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:12.946093 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/103b1e47-f61b-47ee-8de4-a6a4f83cc316-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:13.399894 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-db-sync-2z6xm" event={"ID":"103b1e47-f61b-47ee-8de4-a6a4f83cc316","Type":"ContainerDied","Data":"6e73a3deafce05cc3338003d2a7cec2695b837c886c6be48310585a0afb41018"} Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:13.400117 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e73a3deafce05cc3338003d2a7cec2695b837c886c6be48310585a0afb41018" Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:13.399935 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/heat-db-sync-2z6xm" Nov 21 15:45:13 crc kubenswrapper[4774]: I1121 15:45:13.454314 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-846b7d466c-wl6n8" podUID="4ac90da4-62d3-4985-83da-d106def413db" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.115:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.115:8080: connect: connection refused" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.462412 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-engine-94df8c594-xh8vx"] Nov 21 15:45:14 crc kubenswrapper[4774]: E1121 15:45:14.463191 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="103b1e47-f61b-47ee-8de4-a6a4f83cc316" containerName="heat-db-sync" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.463205 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="103b1e47-f61b-47ee-8de4-a6a4f83cc316" containerName="heat-db-sync" Nov 21 15:45:14 crc kubenswrapper[4774]: E1121 15:45:14.463215 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f72fcfa5-b81f-448f-8aa5-0134d627c92b" containerName="collect-profiles" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.463223 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f72fcfa5-b81f-448f-8aa5-0134d627c92b" containerName="collect-profiles" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.463419 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f72fcfa5-b81f-448f-8aa5-0134d627c92b" containerName="collect-profiles" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.463439 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="103b1e47-f61b-47ee-8de4-a6a4f83cc316" containerName="heat-db-sync" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.465808 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.469571 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-heat-dockercfg-85pjg" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.472105 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-engine-config-data" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.476623 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-config-data" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.506291 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-94df8c594-xh8vx"] Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.558337 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-api-5958684764-r77rb"] Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.562443 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.568587 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-api-config-data" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.581611 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19b1e55a-0939-4fba-97a1-b1d3c8d9e14a-combined-ca-bundle\") pod \"heat-engine-94df8c594-xh8vx\" (UID: \"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a\") " pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.581797 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8snr\" (UniqueName: \"kubernetes.io/projected/19b1e55a-0939-4fba-97a1-b1d3c8d9e14a-kube-api-access-r8snr\") pod \"heat-engine-94df8c594-xh8vx\" (UID: \"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a\") " pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.581874 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19b1e55a-0939-4fba-97a1-b1d3c8d9e14a-config-data\") pod \"heat-engine-94df8c594-xh8vx\" (UID: \"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a\") " pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.581903 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/19b1e55a-0939-4fba-97a1-b1d3c8d9e14a-config-data-custom\") pod \"heat-engine-94df8c594-xh8vx\" (UID: \"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a\") " pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.582505 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-5958684764-r77rb"] Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.629887 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/heat-cfnapi-698d644556-pdzp8"] Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.631516 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.634976 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"heat-cfnapi-config-data" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.650082 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-698d644556-pdzp8"] Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.684596 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs4m2\" (UniqueName: \"kubernetes.io/projected/b6837766-9ca4-42d6-a7a8-15ce3cbb14aa-kube-api-access-zs4m2\") pod \"heat-api-5958684764-r77rb\" (UID: \"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa\") " pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.684664 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjm2v\" (UniqueName: \"kubernetes.io/projected/b1f38ecd-a119-493e-bdf7-63e4b253586d-kube-api-access-bjm2v\") pod \"heat-cfnapi-698d644556-pdzp8\" (UID: \"b1f38ecd-a119-493e-bdf7-63e4b253586d\") " pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.684774 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6837766-9ca4-42d6-a7a8-15ce3cbb14aa-combined-ca-bundle\") pod \"heat-api-5958684764-r77rb\" (UID: \"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa\") " pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.684834 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8snr\" (UniqueName: \"kubernetes.io/projected/19b1e55a-0939-4fba-97a1-b1d3c8d9e14a-kube-api-access-r8snr\") pod \"heat-engine-94df8c594-xh8vx\" (UID: \"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a\") " pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.684877 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f38ecd-a119-493e-bdf7-63e4b253586d-combined-ca-bundle\") pod \"heat-cfnapi-698d644556-pdzp8\" (UID: \"b1f38ecd-a119-493e-bdf7-63e4b253586d\") " pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.684921 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19b1e55a-0939-4fba-97a1-b1d3c8d9e14a-config-data\") pod \"heat-engine-94df8c594-xh8vx\" (UID: \"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a\") " pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.684946 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6837766-9ca4-42d6-a7a8-15ce3cbb14aa-config-data\") pod \"heat-api-5958684764-r77rb\" (UID: \"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa\") " pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.684975 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/19b1e55a-0939-4fba-97a1-b1d3c8d9e14a-config-data-custom\") pod \"heat-engine-94df8c594-xh8vx\" (UID: 
\"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a\") " pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.684999 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b6837766-9ca4-42d6-a7a8-15ce3cbb14aa-config-data-custom\") pod \"heat-api-5958684764-r77rb\" (UID: \"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa\") " pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.685050 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1f38ecd-a119-493e-bdf7-63e4b253586d-config-data-custom\") pod \"heat-cfnapi-698d644556-pdzp8\" (UID: \"b1f38ecd-a119-493e-bdf7-63e4b253586d\") " pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.685104 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19b1e55a-0939-4fba-97a1-b1d3c8d9e14a-combined-ca-bundle\") pod \"heat-engine-94df8c594-xh8vx\" (UID: \"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a\") " pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.685151 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f38ecd-a119-493e-bdf7-63e4b253586d-config-data\") pod \"heat-cfnapi-698d644556-pdzp8\" (UID: \"b1f38ecd-a119-493e-bdf7-63e4b253586d\") " pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.695706 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19b1e55a-0939-4fba-97a1-b1d3c8d9e14a-config-data\") pod \"heat-engine-94df8c594-xh8vx\" (UID: \"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a\") " pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.696260 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/19b1e55a-0939-4fba-97a1-b1d3c8d9e14a-config-data-custom\") pod \"heat-engine-94df8c594-xh8vx\" (UID: \"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a\") " pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.698509 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19b1e55a-0939-4fba-97a1-b1d3c8d9e14a-combined-ca-bundle\") pod \"heat-engine-94df8c594-xh8vx\" (UID: \"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a\") " pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.710655 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8snr\" (UniqueName: \"kubernetes.io/projected/19b1e55a-0939-4fba-97a1-b1d3c8d9e14a-kube-api-access-r8snr\") pod \"heat-engine-94df8c594-xh8vx\" (UID: \"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a\") " pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.787539 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1f38ecd-a119-493e-bdf7-63e4b253586d-config-data-custom\") pod \"heat-cfnapi-698d644556-pdzp8\" (UID: 
\"b1f38ecd-a119-493e-bdf7-63e4b253586d\") " pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.787654 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f38ecd-a119-493e-bdf7-63e4b253586d-config-data\") pod \"heat-cfnapi-698d644556-pdzp8\" (UID: \"b1f38ecd-a119-493e-bdf7-63e4b253586d\") " pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.788512 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zs4m2\" (UniqueName: \"kubernetes.io/projected/b6837766-9ca4-42d6-a7a8-15ce3cbb14aa-kube-api-access-zs4m2\") pod \"heat-api-5958684764-r77rb\" (UID: \"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa\") " pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.788575 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjm2v\" (UniqueName: \"kubernetes.io/projected/b1f38ecd-a119-493e-bdf7-63e4b253586d-kube-api-access-bjm2v\") pod \"heat-cfnapi-698d644556-pdzp8\" (UID: \"b1f38ecd-a119-493e-bdf7-63e4b253586d\") " pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.788671 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6837766-9ca4-42d6-a7a8-15ce3cbb14aa-combined-ca-bundle\") pod \"heat-api-5958684764-r77rb\" (UID: \"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa\") " pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.789195 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f38ecd-a119-493e-bdf7-63e4b253586d-combined-ca-bundle\") pod \"heat-cfnapi-698d644556-pdzp8\" (UID: \"b1f38ecd-a119-493e-bdf7-63e4b253586d\") " pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.789269 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6837766-9ca4-42d6-a7a8-15ce3cbb14aa-config-data\") pod \"heat-api-5958684764-r77rb\" (UID: \"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa\") " pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.789398 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b6837766-9ca4-42d6-a7a8-15ce3cbb14aa-config-data-custom\") pod \"heat-api-5958684764-r77rb\" (UID: \"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa\") " pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.791123 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.794052 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b1f38ecd-a119-493e-bdf7-63e4b253586d-config-data-custom\") pod \"heat-cfnapi-698d644556-pdzp8\" (UID: \"b1f38ecd-a119-493e-bdf7-63e4b253586d\") " pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.794611 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1f38ecd-a119-493e-bdf7-63e4b253586d-config-data\") pod \"heat-cfnapi-698d644556-pdzp8\" (UID: \"b1f38ecd-a119-493e-bdf7-63e4b253586d\") " pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.796805 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1f38ecd-a119-493e-bdf7-63e4b253586d-combined-ca-bundle\") pod \"heat-cfnapi-698d644556-pdzp8\" (UID: \"b1f38ecd-a119-493e-bdf7-63e4b253586d\") " pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.799266 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b6837766-9ca4-42d6-a7a8-15ce3cbb14aa-config-data-custom\") pod \"heat-api-5958684764-r77rb\" (UID: \"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa\") " pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.819862 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6837766-9ca4-42d6-a7a8-15ce3cbb14aa-config-data\") pod \"heat-api-5958684764-r77rb\" (UID: \"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa\") " pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.820620 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6837766-9ca4-42d6-a7a8-15ce3cbb14aa-combined-ca-bundle\") pod \"heat-api-5958684764-r77rb\" (UID: \"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa\") " pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.824181 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zs4m2\" (UniqueName: \"kubernetes.io/projected/b6837766-9ca4-42d6-a7a8-15ce3cbb14aa-kube-api-access-zs4m2\") pod \"heat-api-5958684764-r77rb\" (UID: \"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa\") " pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.834917 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjm2v\" (UniqueName: \"kubernetes.io/projected/b1f38ecd-a119-493e-bdf7-63e4b253586d-kube-api-access-bjm2v\") pod \"heat-cfnapi-698d644556-pdzp8\" (UID: \"b1f38ecd-a119-493e-bdf7-63e4b253586d\") " pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.899313 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:14 crc kubenswrapper[4774]: I1121 15:45:14.964259 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:15 crc kubenswrapper[4774]: I1121 15:45:15.527588 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-engine-94df8c594-xh8vx"] Nov 21 15:45:15 crc kubenswrapper[4774]: I1121 15:45:15.604055 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-api-5958684764-r77rb"] Nov 21 15:45:15 crc kubenswrapper[4774]: W1121 15:45:15.608646 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6837766_9ca4_42d6_a7a8_15ce3cbb14aa.slice/crio-52348c1443e7f3996fb22db0a8dcf923e81a392a021002a88e68bbebb4fe1b65 WatchSource:0}: Error finding container 52348c1443e7f3996fb22db0a8dcf923e81a392a021002a88e68bbebb4fe1b65: Status 404 returned error can't find the container with id 52348c1443e7f3996fb22db0a8dcf923e81a392a021002a88e68bbebb4fe1b65 Nov 21 15:45:15 crc kubenswrapper[4774]: I1121 15:45:15.699183 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/heat-cfnapi-698d644556-pdzp8"] Nov 21 15:45:15 crc kubenswrapper[4774]: I1121 15:45:15.941490 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wctbc"] Nov 21 15:45:15 crc kubenswrapper[4774]: I1121 15:45:15.945264 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:15 crc kubenswrapper[4774]: I1121 15:45:15.964970 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wctbc"] Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.026489 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4vnj\" (UniqueName: \"kubernetes.io/projected/a2eb10cb-dec4-433b-991f-17ba8db390bc-kube-api-access-z4vnj\") pod \"redhat-marketplace-wctbc\" (UID: \"a2eb10cb-dec4-433b-991f-17ba8db390bc\") " pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.026595 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2eb10cb-dec4-433b-991f-17ba8db390bc-utilities\") pod \"redhat-marketplace-wctbc\" (UID: \"a2eb10cb-dec4-433b-991f-17ba8db390bc\") " pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.026720 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2eb10cb-dec4-433b-991f-17ba8db390bc-catalog-content\") pod \"redhat-marketplace-wctbc\" (UID: \"a2eb10cb-dec4-433b-991f-17ba8db390bc\") " pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.128254 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2eb10cb-dec4-433b-991f-17ba8db390bc-utilities\") pod \"redhat-marketplace-wctbc\" (UID: \"a2eb10cb-dec4-433b-991f-17ba8db390bc\") " pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.128433 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2eb10cb-dec4-433b-991f-17ba8db390bc-catalog-content\") pod \"redhat-marketplace-wctbc\" (UID: 
\"a2eb10cb-dec4-433b-991f-17ba8db390bc\") " pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.128529 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4vnj\" (UniqueName: \"kubernetes.io/projected/a2eb10cb-dec4-433b-991f-17ba8db390bc-kube-api-access-z4vnj\") pod \"redhat-marketplace-wctbc\" (UID: \"a2eb10cb-dec4-433b-991f-17ba8db390bc\") " pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.128768 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2eb10cb-dec4-433b-991f-17ba8db390bc-utilities\") pod \"redhat-marketplace-wctbc\" (UID: \"a2eb10cb-dec4-433b-991f-17ba8db390bc\") " pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.130260 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2eb10cb-dec4-433b-991f-17ba8db390bc-catalog-content\") pod \"redhat-marketplace-wctbc\" (UID: \"a2eb10cb-dec4-433b-991f-17ba8db390bc\") " pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.148650 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4vnj\" (UniqueName: \"kubernetes.io/projected/a2eb10cb-dec4-433b-991f-17ba8db390bc-kube-api-access-z4vnj\") pod \"redhat-marketplace-wctbc\" (UID: \"a2eb10cb-dec4-433b-991f-17ba8db390bc\") " pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.274920 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.456377 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-94df8c594-xh8vx" event={"ID":"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a","Type":"ContainerStarted","Data":"56fc9aa7609788bff71db3688c37270db2b79e12c3c044f1a3c8e485970a9af8"} Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.456793 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.456810 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-engine-94df8c594-xh8vx" event={"ID":"19b1e55a-0939-4fba-97a1-b1d3c8d9e14a","Type":"ContainerStarted","Data":"877ca41f1cb6f80210b599ae3df2ee0f71cba4e75af6f62dd7cffb1f9feeb603"} Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.474700 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-698d644556-pdzp8" event={"ID":"b1f38ecd-a119-493e-bdf7-63e4b253586d","Type":"ContainerStarted","Data":"8b3eed5e52feff49f7b64bcaabea2b8b5f58ea3676191672a0dcda0b794601f0"} Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.490389 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5958684764-r77rb" event={"ID":"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa","Type":"ContainerStarted","Data":"52348c1443e7f3996fb22db0a8dcf923e81a392a021002a88e68bbebb4fe1b65"} Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.496594 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-engine-94df8c594-xh8vx" podStartSLOduration=2.496574882 podStartE2EDuration="2.496574882s" podCreationTimestamp="2025-11-21 15:45:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:45:16.492459425 +0000 UTC m=+6107.144658684" watchObservedRunningTime="2025-11-21 15:45:16.496574882 +0000 UTC m=+6107.148774131" Nov 21 15:45:16 crc kubenswrapper[4774]: I1121 15:45:16.578256 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wctbc"] Nov 21 15:45:16 crc kubenswrapper[4774]: W1121 15:45:16.590360 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2eb10cb_dec4_433b_991f_17ba8db390bc.slice/crio-2fee994e8e7e9c251c9985c10a1ab9a8c4e08921185eee5a21c791db8efcf5e4 WatchSource:0}: Error finding container 2fee994e8e7e9c251c9985c10a1ab9a8c4e08921185eee5a21c791db8efcf5e4: Status 404 returned error can't find the container with id 2fee994e8e7e9c251c9985c10a1ab9a8c4e08921185eee5a21c791db8efcf5e4 Nov 21 15:45:17 crc kubenswrapper[4774]: I1121 15:45:17.504156 4774 generic.go:334] "Generic (PLEG): container finished" podID="a2eb10cb-dec4-433b-991f-17ba8db390bc" containerID="9fb778b44216fd6e1548837f2e7ef22572859659d6059884407fb02988ae6456" exitCode=0 Nov 21 15:45:17 crc kubenswrapper[4774]: I1121 15:45:17.504269 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wctbc" event={"ID":"a2eb10cb-dec4-433b-991f-17ba8db390bc","Type":"ContainerDied","Data":"9fb778b44216fd6e1548837f2e7ef22572859659d6059884407fb02988ae6456"} Nov 21 15:45:17 crc kubenswrapper[4774]: I1121 15:45:17.504542 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wctbc" 
event={"ID":"a2eb10cb-dec4-433b-991f-17ba8db390bc","Type":"ContainerStarted","Data":"2fee994e8e7e9c251c9985c10a1ab9a8c4e08921185eee5a21c791db8efcf5e4"} Nov 21 15:45:18 crc kubenswrapper[4774]: I1121 15:45:18.514609 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-cfnapi-698d644556-pdzp8" event={"ID":"b1f38ecd-a119-493e-bdf7-63e4b253586d","Type":"ContainerStarted","Data":"1e2b1502ed4e031847ae9b858c970f7cf2f4e759a2412114df90fdf49636b25d"} Nov 21 15:45:18 crc kubenswrapper[4774]: I1121 15:45:18.515148 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:18 crc kubenswrapper[4774]: I1121 15:45:18.516557 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/heat-api-5958684764-r77rb" event={"ID":"b6837766-9ca4-42d6-a7a8-15ce3cbb14aa","Type":"ContainerStarted","Data":"6b8f5a82e02b77ff55860a27b79866a5a273b24b2c38ad2c2f00c9cbc4a4ce7a"} Nov 21 15:45:18 crc kubenswrapper[4774]: I1121 15:45:18.516695 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:18 crc kubenswrapper[4774]: I1121 15:45:18.554066 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-cfnapi-698d644556-pdzp8" podStartSLOduration=2.224561709 podStartE2EDuration="4.554046674s" podCreationTimestamp="2025-11-21 15:45:14 +0000 UTC" firstStartedPulling="2025-11-21 15:45:15.701609831 +0000 UTC m=+6106.353809090" lastFinishedPulling="2025-11-21 15:45:18.031094796 +0000 UTC m=+6108.683294055" observedRunningTime="2025-11-21 15:45:18.541541847 +0000 UTC m=+6109.193741106" watchObservedRunningTime="2025-11-21 15:45:18.554046674 +0000 UTC m=+6109.206245933" Nov 21 15:45:18 crc kubenswrapper[4774]: I1121 15:45:18.563191 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/heat-api-5958684764-r77rb" podStartSLOduration=2.147112731 podStartE2EDuration="4.563157313s" podCreationTimestamp="2025-11-21 15:45:14 +0000 UTC" firstStartedPulling="2025-11-21 15:45:15.617972057 +0000 UTC m=+6106.270171326" lastFinishedPulling="2025-11-21 15:45:18.034016649 +0000 UTC m=+6108.686215908" observedRunningTime="2025-11-21 15:45:18.561810985 +0000 UTC m=+6109.214010244" watchObservedRunningTime="2025-11-21 15:45:18.563157313 +0000 UTC m=+6109.215356562" Nov 21 15:45:19 crc kubenswrapper[4774]: I1121 15:45:19.529109 4774 generic.go:334] "Generic (PLEG): container finished" podID="a2eb10cb-dec4-433b-991f-17ba8db390bc" containerID="ccb03e45ec34cc93c84efef070a4ada76b17f0bd8c1afce752d0685ed9be7a8a" exitCode=0 Nov 21 15:45:19 crc kubenswrapper[4774]: I1121 15:45:19.529169 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wctbc" event={"ID":"a2eb10cb-dec4-433b-991f-17ba8db390bc","Type":"ContainerDied","Data":"ccb03e45ec34cc93c84efef070a4ada76b17f0bd8c1afce752d0685ed9be7a8a"} Nov 21 15:45:21 crc kubenswrapper[4774]: I1121 15:45:21.093193 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:45:21 crc kubenswrapper[4774]: E1121 15:45:21.093994 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:45:21 crc kubenswrapper[4774]: I1121 15:45:21.585016 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wctbc" event={"ID":"a2eb10cb-dec4-433b-991f-17ba8db390bc","Type":"ContainerStarted","Data":"2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f"} Nov 21 15:45:21 crc kubenswrapper[4774]: I1121 15:45:21.602353 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wctbc" podStartSLOduration=4.10467661 podStartE2EDuration="6.602336549s" podCreationTimestamp="2025-11-21 15:45:15 +0000 UTC" firstStartedPulling="2025-11-21 15:45:17.842726256 +0000 UTC m=+6108.494925515" lastFinishedPulling="2025-11-21 15:45:20.340386195 +0000 UTC m=+6110.992585454" observedRunningTime="2025-11-21 15:45:21.601407932 +0000 UTC m=+6112.253607191" watchObservedRunningTime="2025-11-21 15:45:21.602336549 +0000 UTC m=+6112.254535808" Nov 21 15:45:24 crc kubenswrapper[4774]: I1121 15:45:24.040364 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-228f-account-create-47lmt"] Nov 21 15:45:24 crc kubenswrapper[4774]: I1121 15:45:24.050288 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-pkbg9"] Nov 21 15:45:24 crc kubenswrapper[4774]: I1121 15:45:24.060401 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-pkbg9"] Nov 21 15:45:24 crc kubenswrapper[4774]: I1121 15:45:24.069111 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-228f-account-create-47lmt"] Nov 21 15:45:24 crc kubenswrapper[4774]: I1121 15:45:24.104054 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66350a84-c1ad-4e88-a80e-63d338e03016" path="/var/lib/kubelet/pods/66350a84-c1ad-4e88-a80e-63d338e03016/volumes" Nov 21 15:45:24 crc kubenswrapper[4774]: I1121 15:45:24.104787 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a56a3c85-e130-4514-b602-a94b444454ad" path="/var/lib/kubelet/pods/a56a3c85-e130-4514-b602-a94b444454ad/volumes" Nov 21 15:45:25 crc kubenswrapper[4774]: I1121 15:45:25.452385 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:45:26 crc kubenswrapper[4774]: I1121 15:45:26.275601 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:26 crc kubenswrapper[4774]: I1121 15:45:26.275651 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:26 crc kubenswrapper[4774]: I1121 15:45:26.319366 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-api-5958684764-r77rb" Nov 21 15:45:26 crc kubenswrapper[4774]: I1121 15:45:26.332437 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:26 crc kubenswrapper[4774]: I1121 15:45:26.386382 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-cfnapi-698d644556-pdzp8" Nov 21 15:45:26 crc kubenswrapper[4774]: I1121 15:45:26.699893 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:26 crc 
kubenswrapper[4774]: I1121 15:45:26.750200 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wctbc"] Nov 21 15:45:27 crc kubenswrapper[4774]: I1121 15:45:27.382501 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-846b7d466c-wl6n8" Nov 21 15:45:27 crc kubenswrapper[4774]: I1121 15:45:27.442671 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b68ccc75c-xqkjc"] Nov 21 15:45:27 crc kubenswrapper[4774]: I1121 15:45:27.442915 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b68ccc75c-xqkjc" podUID="beddfe34-7178-4f7d-9428-ebd52715e910" containerName="horizon-log" containerID="cri-o://ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c" gracePeriod=30 Nov 21 15:45:27 crc kubenswrapper[4774]: I1121 15:45:27.443033 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-7b68ccc75c-xqkjc" podUID="beddfe34-7178-4f7d-9428-ebd52715e910" containerName="horizon" containerID="cri-o://8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26" gracePeriod=30 Nov 21 15:45:28 crc kubenswrapper[4774]: I1121 15:45:28.663606 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wctbc" podUID="a2eb10cb-dec4-433b-991f-17ba8db390bc" containerName="registry-server" containerID="cri-o://2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f" gracePeriod=2 Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.148486 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.349396 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4vnj\" (UniqueName: \"kubernetes.io/projected/a2eb10cb-dec4-433b-991f-17ba8db390bc-kube-api-access-z4vnj\") pod \"a2eb10cb-dec4-433b-991f-17ba8db390bc\" (UID: \"a2eb10cb-dec4-433b-991f-17ba8db390bc\") " Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.349597 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2eb10cb-dec4-433b-991f-17ba8db390bc-utilities\") pod \"a2eb10cb-dec4-433b-991f-17ba8db390bc\" (UID: \"a2eb10cb-dec4-433b-991f-17ba8db390bc\") " Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.349912 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2eb10cb-dec4-433b-991f-17ba8db390bc-catalog-content\") pod \"a2eb10cb-dec4-433b-991f-17ba8db390bc\" (UID: \"a2eb10cb-dec4-433b-991f-17ba8db390bc\") " Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.350571 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2eb10cb-dec4-433b-991f-17ba8db390bc-utilities" (OuterVolumeSpecName: "utilities") pod "a2eb10cb-dec4-433b-991f-17ba8db390bc" (UID: "a2eb10cb-dec4-433b-991f-17ba8db390bc"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.355760 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2eb10cb-dec4-433b-991f-17ba8db390bc-kube-api-access-z4vnj" (OuterVolumeSpecName: "kube-api-access-z4vnj") pod "a2eb10cb-dec4-433b-991f-17ba8db390bc" (UID: "a2eb10cb-dec4-433b-991f-17ba8db390bc"). InnerVolumeSpecName "kube-api-access-z4vnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.368106 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2eb10cb-dec4-433b-991f-17ba8db390bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a2eb10cb-dec4-433b-991f-17ba8db390bc" (UID: "a2eb10cb-dec4-433b-991f-17ba8db390bc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.452560 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2eb10cb-dec4-433b-991f-17ba8db390bc-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.452620 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2eb10cb-dec4-433b-991f-17ba8db390bc-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.452647 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4vnj\" (UniqueName: \"kubernetes.io/projected/a2eb10cb-dec4-433b-991f-17ba8db390bc-kube-api-access-z4vnj\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.676310 4774 generic.go:334] "Generic (PLEG): container finished" podID="a2eb10cb-dec4-433b-991f-17ba8db390bc" containerID="2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f" exitCode=0 Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.676570 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wctbc" event={"ID":"a2eb10cb-dec4-433b-991f-17ba8db390bc","Type":"ContainerDied","Data":"2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f"} Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.677679 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wctbc" event={"ID":"a2eb10cb-dec4-433b-991f-17ba8db390bc","Type":"ContainerDied","Data":"2fee994e8e7e9c251c9985c10a1ab9a8c4e08921185eee5a21c791db8efcf5e4"} Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.676632 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wctbc" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.677712 4774 scope.go:117] "RemoveContainer" containerID="2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.702552 4774 scope.go:117] "RemoveContainer" containerID="ccb03e45ec34cc93c84efef070a4ada76b17f0bd8c1afce752d0685ed9be7a8a" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.728034 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wctbc"] Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.740513 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wctbc"] Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.767068 4774 scope.go:117] "RemoveContainer" containerID="9fb778b44216fd6e1548837f2e7ef22572859659d6059884407fb02988ae6456" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.798369 4774 scope.go:117] "RemoveContainer" containerID="2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f" Nov 21 15:45:29 crc kubenswrapper[4774]: E1121 15:45:29.798937 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f\": container with ID starting with 2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f not found: ID does not exist" containerID="2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.798982 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f"} err="failed to get container status \"2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f\": rpc error: code = NotFound desc = could not find container \"2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f\": container with ID starting with 2d0b00462957beca64e4922b1862619bb8a54c93a1e734f4c14b99ed6f718c6f not found: ID does not exist" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.799035 4774 scope.go:117] "RemoveContainer" containerID="ccb03e45ec34cc93c84efef070a4ada76b17f0bd8c1afce752d0685ed9be7a8a" Nov 21 15:45:29 crc kubenswrapper[4774]: E1121 15:45:29.799949 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ccb03e45ec34cc93c84efef070a4ada76b17f0bd8c1afce752d0685ed9be7a8a\": container with ID starting with ccb03e45ec34cc93c84efef070a4ada76b17f0bd8c1afce752d0685ed9be7a8a not found: ID does not exist" containerID="ccb03e45ec34cc93c84efef070a4ada76b17f0bd8c1afce752d0685ed9be7a8a" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.799984 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ccb03e45ec34cc93c84efef070a4ada76b17f0bd8c1afce752d0685ed9be7a8a"} err="failed to get container status \"ccb03e45ec34cc93c84efef070a4ada76b17f0bd8c1afce752d0685ed9be7a8a\": rpc error: code = NotFound desc = could not find container \"ccb03e45ec34cc93c84efef070a4ada76b17f0bd8c1afce752d0685ed9be7a8a\": container with ID starting with ccb03e45ec34cc93c84efef070a4ada76b17f0bd8c1afce752d0685ed9be7a8a not found: ID does not exist" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.800004 4774 scope.go:117] "RemoveContainer" 
containerID="9fb778b44216fd6e1548837f2e7ef22572859659d6059884407fb02988ae6456" Nov 21 15:45:29 crc kubenswrapper[4774]: E1121 15:45:29.800378 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fb778b44216fd6e1548837f2e7ef22572859659d6059884407fb02988ae6456\": container with ID starting with 9fb778b44216fd6e1548837f2e7ef22572859659d6059884407fb02988ae6456 not found: ID does not exist" containerID="9fb778b44216fd6e1548837f2e7ef22572859659d6059884407fb02988ae6456" Nov 21 15:45:29 crc kubenswrapper[4774]: I1121 15:45:29.800428 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fb778b44216fd6e1548837f2e7ef22572859659d6059884407fb02988ae6456"} err="failed to get container status \"9fb778b44216fd6e1548837f2e7ef22572859659d6059884407fb02988ae6456\": rpc error: code = NotFound desc = could not find container \"9fb778b44216fd6e1548837f2e7ef22572859659d6059884407fb02988ae6456\": container with ID starting with 9fb778b44216fd6e1548837f2e7ef22572859659d6059884407fb02988ae6456 not found: ID does not exist" Nov 21 15:45:30 crc kubenswrapper[4774]: I1121 15:45:30.028512 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-lc24s"] Nov 21 15:45:30 crc kubenswrapper[4774]: I1121 15:45:30.039529 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-lc24s"] Nov 21 15:45:30 crc kubenswrapper[4774]: I1121 15:45:30.109461 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c58e79c-d93c-43d1-bc52-d7ba1de82482" path="/var/lib/kubelet/pods/6c58e79c-d93c-43d1-bc52-d7ba1de82482/volumes" Nov 21 15:45:30 crc kubenswrapper[4774]: I1121 15:45:30.110302 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2eb10cb-dec4-433b-991f-17ba8db390bc" path="/var/lib/kubelet/pods/a2eb10cb-dec4-433b-991f-17ba8db390bc/volumes" Nov 21 15:45:30 crc kubenswrapper[4774]: I1121 15:45:30.694128 4774 generic.go:334] "Generic (PLEG): container finished" podID="beddfe34-7178-4f7d-9428-ebd52715e910" containerID="8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26" exitCode=0 Nov 21 15:45:30 crc kubenswrapper[4774]: I1121 15:45:30.694254 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b68ccc75c-xqkjc" event={"ID":"beddfe34-7178-4f7d-9428-ebd52715e910","Type":"ContainerDied","Data":"8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26"} Nov 21 15:45:34 crc kubenswrapper[4774]: I1121 15:45:34.838107 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/heat-engine-94df8c594-xh8vx" Nov 21 15:45:35 crc kubenswrapper[4774]: I1121 15:45:35.093325 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:45:35 crc kubenswrapper[4774]: E1121 15:45:35.093714 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:45:36 crc kubenswrapper[4774]: I1121 15:45:36.857287 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7b68ccc75c-xqkjc" 
podUID="beddfe34-7178-4f7d-9428-ebd52715e910" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.111:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.111:8080: connect: connection refused" Nov 21 15:45:46 crc kubenswrapper[4774]: I1121 15:45:46.694215 4774 scope.go:117] "RemoveContainer" containerID="159f6a9ed9061fbdc774c20ad9884c95c9c7a1089f483b2cf76a6fbb9eb2937c" Nov 21 15:45:46 crc kubenswrapper[4774]: I1121 15:45:46.729599 4774 scope.go:117] "RemoveContainer" containerID="07b4199853d87430a2480cfe58b005b834ea1e8b113e865e7f15d5fdc3fe0ec8" Nov 21 15:45:46 crc kubenswrapper[4774]: I1121 15:45:46.778375 4774 scope.go:117] "RemoveContainer" containerID="448ba4ab0aca6edd03851feb3559a2a615dfc1a2c92696d1b84c17941b929c7a" Nov 21 15:45:46 crc kubenswrapper[4774]: I1121 15:45:46.827784 4774 scope.go:117] "RemoveContainer" containerID="5d8a9b3c8014fb2061d177ab8c45d0e93aae00047dfe2a5cf08976e247c92465" Nov 21 15:45:46 crc kubenswrapper[4774]: I1121 15:45:46.858925 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7b68ccc75c-xqkjc" podUID="beddfe34-7178-4f7d-9428-ebd52715e910" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.111:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.111:8080: connect: connection refused" Nov 21 15:45:46 crc kubenswrapper[4774]: I1121 15:45:46.879481 4774 scope.go:117] "RemoveContainer" containerID="2768c28dfbbd275bdd9cfe8a0a5deb4b8b1e8febb1eb57d9a9586cf3b89803f4" Nov 21 15:45:48 crc kubenswrapper[4774]: I1121 15:45:48.093877 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:45:48 crc kubenswrapper[4774]: E1121 15:45:48.095320 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.116641 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw"] Nov 21 15:45:53 crc kubenswrapper[4774]: E1121 15:45:53.117641 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2eb10cb-dec4-433b-991f-17ba8db390bc" containerName="extract-utilities" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.117660 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2eb10cb-dec4-433b-991f-17ba8db390bc" containerName="extract-utilities" Nov 21 15:45:53 crc kubenswrapper[4774]: E1121 15:45:53.117670 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2eb10cb-dec4-433b-991f-17ba8db390bc" containerName="registry-server" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.117679 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2eb10cb-dec4-433b-991f-17ba8db390bc" containerName="registry-server" Nov 21 15:45:53 crc kubenswrapper[4774]: E1121 15:45:53.117694 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2eb10cb-dec4-433b-991f-17ba8db390bc" containerName="extract-content" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.117700 4774 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a2eb10cb-dec4-433b-991f-17ba8db390bc" containerName="extract-content" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.117972 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2eb10cb-dec4-433b-991f-17ba8db390bc" containerName="registry-server" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.119736 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.126018 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.131467 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw"] Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.245797 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxhpf\" (UniqueName: \"kubernetes.io/projected/dfe3578c-e945-477d-9a6d-ade2a4563182-kube-api-access-wxhpf\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw\" (UID: \"dfe3578c-e945-477d-9a6d-ade2a4563182\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.246191 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfe3578c-e945-477d-9a6d-ade2a4563182-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw\" (UID: \"dfe3578c-e945-477d-9a6d-ade2a4563182\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.246238 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfe3578c-e945-477d-9a6d-ade2a4563182-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw\" (UID: \"dfe3578c-e945-477d-9a6d-ade2a4563182\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.347748 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxhpf\" (UniqueName: \"kubernetes.io/projected/dfe3578c-e945-477d-9a6d-ade2a4563182-kube-api-access-wxhpf\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw\" (UID: \"dfe3578c-e945-477d-9a6d-ade2a4563182\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.347855 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfe3578c-e945-477d-9a6d-ade2a4563182-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw\" (UID: \"dfe3578c-e945-477d-9a6d-ade2a4563182\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.347917 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfe3578c-e945-477d-9a6d-ade2a4563182-bundle\") pod 
\"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw\" (UID: \"dfe3578c-e945-477d-9a6d-ade2a4563182\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.348365 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfe3578c-e945-477d-9a6d-ade2a4563182-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw\" (UID: \"dfe3578c-e945-477d-9a6d-ade2a4563182\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.348444 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfe3578c-e945-477d-9a6d-ade2a4563182-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw\" (UID: \"dfe3578c-e945-477d-9a6d-ade2a4563182\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.367482 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxhpf\" (UniqueName: \"kubernetes.io/projected/dfe3578c-e945-477d-9a6d-ade2a4563182-kube-api-access-wxhpf\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw\" (UID: \"dfe3578c-e945-477d-9a6d-ade2a4563182\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:45:53 crc kubenswrapper[4774]: I1121 15:45:53.440125 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:45:54 crc kubenswrapper[4774]: I1121 15:45:54.070582 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw"] Nov 21 15:45:54 crc kubenswrapper[4774]: I1121 15:45:54.950883 4774 generic.go:334] "Generic (PLEG): container finished" podID="dfe3578c-e945-477d-9a6d-ade2a4563182" containerID="20e1ba5c863099b4609764078102efd9d9f4b859e44fc3e191e702877d7b4241" exitCode=0 Nov 21 15:45:54 crc kubenswrapper[4774]: I1121 15:45:54.950930 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" event={"ID":"dfe3578c-e945-477d-9a6d-ade2a4563182","Type":"ContainerDied","Data":"20e1ba5c863099b4609764078102efd9d9f4b859e44fc3e191e702877d7b4241"} Nov 21 15:45:54 crc kubenswrapper[4774]: I1121 15:45:54.951236 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" event={"ID":"dfe3578c-e945-477d-9a6d-ade2a4563182","Type":"ContainerStarted","Data":"fd1c7346ddb668bc50cf420fb8a6de6f57436c8fb263f0c272c6aa618753597f"} Nov 21 15:45:56 crc kubenswrapper[4774]: I1121 15:45:56.857250 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-7b68ccc75c-xqkjc" podUID="beddfe34-7178-4f7d-9428-ebd52715e910" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.111:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.111:8080: connect: connection refused" Nov 21 15:45:56 crc kubenswrapper[4774]: I1121 15:45:56.858004 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/horizon-7b68ccc75c-xqkjc" Nov 21 15:45:56 crc kubenswrapper[4774]: I1121 15:45:56.988891 4774 generic.go:334] "Generic (PLEG): container finished" podID="dfe3578c-e945-477d-9a6d-ade2a4563182" containerID="f5c1ed2e763c793b9f558e7300805340cbd063aa0e2a293d1f998ec6639860cf" exitCode=0 Nov 21 15:45:56 crc kubenswrapper[4774]: I1121 15:45:56.988934 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" event={"ID":"dfe3578c-e945-477d-9a6d-ade2a4563182","Type":"ContainerDied","Data":"f5c1ed2e763c793b9f558e7300805340cbd063aa0e2a293d1f998ec6639860cf"} Nov 21 15:45:57 crc kubenswrapper[4774]: I1121 15:45:57.940265 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b68ccc75c-xqkjc" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.003603 4774 generic.go:334] "Generic (PLEG): container finished" podID="beddfe34-7178-4f7d-9428-ebd52715e910" containerID="ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c" exitCode=137 Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.003691 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7b68ccc75c-xqkjc" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.003700 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b68ccc75c-xqkjc" event={"ID":"beddfe34-7178-4f7d-9428-ebd52715e910","Type":"ContainerDied","Data":"ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c"} Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.003756 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7b68ccc75c-xqkjc" event={"ID":"beddfe34-7178-4f7d-9428-ebd52715e910","Type":"ContainerDied","Data":"4a5f5c8e32ca1d4b3e3552302d280c9891beb1566049d5ec6c185bb044b3a8d2"} Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.003776 4774 scope.go:117] "RemoveContainer" containerID="8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.006780 4774 generic.go:334] "Generic (PLEG): container finished" podID="dfe3578c-e945-477d-9a6d-ade2a4563182" containerID="da0938baa7902b2021b7c28d7595a639d1be29ebdba9f0acd83583709a88a735" exitCode=0 Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.006881 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" event={"ID":"dfe3578c-e945-477d-9a6d-ade2a4563182","Type":"ContainerDied","Data":"da0938baa7902b2021b7c28d7595a639d1be29ebdba9f0acd83583709a88a735"} Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.048955 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/beddfe34-7178-4f7d-9428-ebd52715e910-logs\") pod \"beddfe34-7178-4f7d-9428-ebd52715e910\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.049192 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/beddfe34-7178-4f7d-9428-ebd52715e910-config-data\") pod \"beddfe34-7178-4f7d-9428-ebd52715e910\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.049257 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" 
(UniqueName: \"kubernetes.io/secret/beddfe34-7178-4f7d-9428-ebd52715e910-horizon-secret-key\") pod \"beddfe34-7178-4f7d-9428-ebd52715e910\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.049362 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/beddfe34-7178-4f7d-9428-ebd52715e910-scripts\") pod \"beddfe34-7178-4f7d-9428-ebd52715e910\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.049501 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vd4nn\" (UniqueName: \"kubernetes.io/projected/beddfe34-7178-4f7d-9428-ebd52715e910-kube-api-access-vd4nn\") pod \"beddfe34-7178-4f7d-9428-ebd52715e910\" (UID: \"beddfe34-7178-4f7d-9428-ebd52715e910\") " Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.051780 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/beddfe34-7178-4f7d-9428-ebd52715e910-logs" (OuterVolumeSpecName: "logs") pod "beddfe34-7178-4f7d-9428-ebd52715e910" (UID: "beddfe34-7178-4f7d-9428-ebd52715e910"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.075325 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/beddfe34-7178-4f7d-9428-ebd52715e910-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "beddfe34-7178-4f7d-9428-ebd52715e910" (UID: "beddfe34-7178-4f7d-9428-ebd52715e910"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.075448 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/beddfe34-7178-4f7d-9428-ebd52715e910-kube-api-access-vd4nn" (OuterVolumeSpecName: "kube-api-access-vd4nn") pod "beddfe34-7178-4f7d-9428-ebd52715e910" (UID: "beddfe34-7178-4f7d-9428-ebd52715e910"). InnerVolumeSpecName "kube-api-access-vd4nn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.078058 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/beddfe34-7178-4f7d-9428-ebd52715e910-scripts" (OuterVolumeSpecName: "scripts") pod "beddfe34-7178-4f7d-9428-ebd52715e910" (UID: "beddfe34-7178-4f7d-9428-ebd52715e910"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.080640 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/beddfe34-7178-4f7d-9428-ebd52715e910-config-data" (OuterVolumeSpecName: "config-data") pod "beddfe34-7178-4f7d-9428-ebd52715e910" (UID: "beddfe34-7178-4f7d-9428-ebd52715e910"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.152671 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vd4nn\" (UniqueName: \"kubernetes.io/projected/beddfe34-7178-4f7d-9428-ebd52715e910-kube-api-access-vd4nn\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.152726 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/beddfe34-7178-4f7d-9428-ebd52715e910-logs\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.152740 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/beddfe34-7178-4f7d-9428-ebd52715e910-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.152754 4774 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/beddfe34-7178-4f7d-9428-ebd52715e910-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.152766 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/beddfe34-7178-4f7d-9428-ebd52715e910-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.198057 4774 scope.go:117] "RemoveContainer" containerID="ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.240131 4774 scope.go:117] "RemoveContainer" containerID="8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26" Nov 21 15:45:58 crc kubenswrapper[4774]: E1121 15:45:58.242149 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26\": container with ID starting with 8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26 not found: ID does not exist" containerID="8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.242189 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26"} err="failed to get container status \"8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26\": rpc error: code = NotFound desc = could not find container \"8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26\": container with ID starting with 8409a16aeef384e26dc3e98f17dde40d5fb37f189be0cae7300e2eeed60a4d26 not found: ID does not exist" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.242218 4774 scope.go:117] "RemoveContainer" containerID="ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c" Nov 21 15:45:58 crc kubenswrapper[4774]: E1121 15:45:58.243158 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c\": container with ID starting with ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c not found: ID does not exist" containerID="ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.243183 4774 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c"} err="failed to get container status \"ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c\": rpc error: code = NotFound desc = could not find container \"ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c\": container with ID starting with ebad3fcea977122d1e849569a5888021accc5549e598271a839ebcbb41d49a7c not found: ID does not exist" Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.324663 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7b68ccc75c-xqkjc"] Nov 21 15:45:58 crc kubenswrapper[4774]: I1121 15:45:58.331399 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7b68ccc75c-xqkjc"] Nov 21 15:45:59 crc kubenswrapper[4774]: I1121 15:45:59.400845 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:45:59 crc kubenswrapper[4774]: I1121 15:45:59.582253 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfe3578c-e945-477d-9a6d-ade2a4563182-bundle\") pod \"dfe3578c-e945-477d-9a6d-ade2a4563182\" (UID: \"dfe3578c-e945-477d-9a6d-ade2a4563182\") " Nov 21 15:45:59 crc kubenswrapper[4774]: I1121 15:45:59.582329 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfe3578c-e945-477d-9a6d-ade2a4563182-util\") pod \"dfe3578c-e945-477d-9a6d-ade2a4563182\" (UID: \"dfe3578c-e945-477d-9a6d-ade2a4563182\") " Nov 21 15:45:59 crc kubenswrapper[4774]: I1121 15:45:59.582521 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxhpf\" (UniqueName: \"kubernetes.io/projected/dfe3578c-e945-477d-9a6d-ade2a4563182-kube-api-access-wxhpf\") pod \"dfe3578c-e945-477d-9a6d-ade2a4563182\" (UID: \"dfe3578c-e945-477d-9a6d-ade2a4563182\") " Nov 21 15:45:59 crc kubenswrapper[4774]: I1121 15:45:59.584464 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfe3578c-e945-477d-9a6d-ade2a4563182-bundle" (OuterVolumeSpecName: "bundle") pod "dfe3578c-e945-477d-9a6d-ade2a4563182" (UID: "dfe3578c-e945-477d-9a6d-ade2a4563182"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:45:59 crc kubenswrapper[4774]: I1121 15:45:59.593262 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfe3578c-e945-477d-9a6d-ade2a4563182-kube-api-access-wxhpf" (OuterVolumeSpecName: "kube-api-access-wxhpf") pod "dfe3578c-e945-477d-9a6d-ade2a4563182" (UID: "dfe3578c-e945-477d-9a6d-ade2a4563182"). InnerVolumeSpecName "kube-api-access-wxhpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:45:59 crc kubenswrapper[4774]: I1121 15:45:59.593490 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfe3578c-e945-477d-9a6d-ade2a4563182-util" (OuterVolumeSpecName: "util") pod "dfe3578c-e945-477d-9a6d-ade2a4563182" (UID: "dfe3578c-e945-477d-9a6d-ade2a4563182"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:45:59 crc kubenswrapper[4774]: I1121 15:45:59.685626 4774 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dfe3578c-e945-477d-9a6d-ade2a4563182-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:59 crc kubenswrapper[4774]: I1121 15:45:59.685702 4774 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dfe3578c-e945-477d-9a6d-ade2a4563182-util\") on node \"crc\" DevicePath \"\"" Nov 21 15:45:59 crc kubenswrapper[4774]: I1121 15:45:59.685719 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxhpf\" (UniqueName: \"kubernetes.io/projected/dfe3578c-e945-477d-9a6d-ade2a4563182-kube-api-access-wxhpf\") on node \"crc\" DevicePath \"\"" Nov 21 15:46:00 crc kubenswrapper[4774]: I1121 15:46:00.036410 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" event={"ID":"dfe3578c-e945-477d-9a6d-ade2a4563182","Type":"ContainerDied","Data":"fd1c7346ddb668bc50cf420fb8a6de6f57436c8fb263f0c272c6aa618753597f"} Nov 21 15:46:00 crc kubenswrapper[4774]: I1121 15:46:00.036451 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd1c7346ddb668bc50cf420fb8a6de6f57436c8fb263f0c272c6aa618753597f" Nov 21 15:46:00 crc kubenswrapper[4774]: I1121 15:46:00.036683 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw" Nov 21 15:46:00 crc kubenswrapper[4774]: I1121 15:46:00.118287 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="beddfe34-7178-4f7d-9428-ebd52715e910" path="/var/lib/kubelet/pods/beddfe34-7178-4f7d-9428-ebd52715e910/volumes" Nov 21 15:46:02 crc kubenswrapper[4774]: I1121 15:46:02.094262 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:46:03 crc kubenswrapper[4774]: I1121 15:46:03.066889 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"af288035ac19f9f50a6dc5ab6216f56c123497fcf2d36d36aec3fddf5ed00acc"} Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.075437 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-std25"] Nov 21 15:46:14 crc kubenswrapper[4774]: E1121 15:46:14.076340 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="beddfe34-7178-4f7d-9428-ebd52715e910" containerName="horizon-log" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.076355 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="beddfe34-7178-4f7d-9428-ebd52715e910" containerName="horizon-log" Nov 21 15:46:14 crc kubenswrapper[4774]: E1121 15:46:14.076382 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe3578c-e945-477d-9a6d-ade2a4563182" containerName="util" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.076388 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe3578c-e945-477d-9a6d-ade2a4563182" containerName="util" Nov 21 15:46:14 crc kubenswrapper[4774]: E1121 15:46:14.076398 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="beddfe34-7178-4f7d-9428-ebd52715e910" 
containerName="horizon" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.076404 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="beddfe34-7178-4f7d-9428-ebd52715e910" containerName="horizon" Nov 21 15:46:14 crc kubenswrapper[4774]: E1121 15:46:14.076412 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe3578c-e945-477d-9a6d-ade2a4563182" containerName="pull" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.076417 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe3578c-e945-477d-9a6d-ade2a4563182" containerName="pull" Nov 21 15:46:14 crc kubenswrapper[4774]: E1121 15:46:14.076438 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfe3578c-e945-477d-9a6d-ade2a4563182" containerName="extract" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.076445 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfe3578c-e945-477d-9a6d-ade2a4563182" containerName="extract" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.076635 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="beddfe34-7178-4f7d-9428-ebd52715e910" containerName="horizon-log" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.076647 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="beddfe34-7178-4f7d-9428-ebd52715e910" containerName="horizon" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.076660 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfe3578c-e945-477d-9a6d-ade2a4563182" containerName="extract" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.077382 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-std25" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.079319 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-q44hl" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.079541 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.092432 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.120286 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgsd4\" (UniqueName: \"kubernetes.io/projected/5ea09086-0f96-4641-ac00-84ad39559acc-kube-api-access-cgsd4\") pod \"obo-prometheus-operator-668cf9dfbb-std25\" (UID: \"5ea09086-0f96-4641-ac00-84ad39559acc\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-std25" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.181473 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-std25"] Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.234079 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgsd4\" (UniqueName: \"kubernetes.io/projected/5ea09086-0f96-4641-ac00-84ad39559acc-kube-api-access-cgsd4\") pod \"obo-prometheus-operator-668cf9dfbb-std25\" (UID: \"5ea09086-0f96-4641-ac00-84ad39559acc\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-std25" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.303091 4774 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9"] Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.316562 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgsd4\" (UniqueName: \"kubernetes.io/projected/5ea09086-0f96-4641-ac00-84ad39559acc-kube-api-access-cgsd4\") pod \"obo-prometheus-operator-668cf9dfbb-std25\" (UID: \"5ea09086-0f96-4641-ac00-84ad39559acc\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-std25" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.321594 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc"] Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.321752 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.324455 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.324512 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-8w5qf" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.327338 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.341152 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9"] Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.349847 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc"] Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.415949 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-std25" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.416441 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-cv785"] Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.419001 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-cv785" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.430968 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.431559 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-84ztv" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.431970 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-cv785"] Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.444083 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-879c99469-n85w9\" (UID: \"5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.444228 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8a13a74a-7bd0-4e11-9dec-402a38c7e984-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-879c99469-plznc\" (UID: \"8a13a74a-7bd0-4e11-9dec-402a38c7e984\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.444319 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-879c99469-n85w9\" (UID: \"5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.444337 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8a13a74a-7bd0-4e11-9dec-402a38c7e984-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-879c99469-plznc\" (UID: \"8a13a74a-7bd0-4e11-9dec-402a38c7e984\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.546578 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-879c99469-n85w9\" (UID: \"5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.546920 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8a13a74a-7bd0-4e11-9dec-402a38c7e984-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-879c99469-plznc\" (UID: \"8a13a74a-7bd0-4e11-9dec-402a38c7e984\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.547015 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-879c99469-n85w9\" (UID: \"5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.547075 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-647qt\" (UniqueName: \"kubernetes.io/projected/cda01f23-488b-459e-8ec4-f4825f188d16-kube-api-access-647qt\") pod \"observability-operator-d8bb48f5d-cv785\" (UID: \"cda01f23-488b-459e-8ec4-f4825f188d16\") " pod="openshift-operators/observability-operator-d8bb48f5d-cv785" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.547129 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8a13a74a-7bd0-4e11-9dec-402a38c7e984-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-879c99469-plznc\" (UID: \"8a13a74a-7bd0-4e11-9dec-402a38c7e984\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.547159 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/cda01f23-488b-459e-8ec4-f4825f188d16-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-cv785\" (UID: \"cda01f23-488b-459e-8ec4-f4825f188d16\") " pod="openshift-operators/observability-operator-d8bb48f5d-cv785" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.557309 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8a13a74a-7bd0-4e11-9dec-402a38c7e984-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-879c99469-plznc\" (UID: \"8a13a74a-7bd0-4e11-9dec-402a38c7e984\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.557568 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-879c99469-n85w9\" (UID: \"5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.566967 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8a13a74a-7bd0-4e11-9dec-402a38c7e984-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-879c99469-plznc\" (UID: \"8a13a74a-7bd0-4e11-9dec-402a38c7e984\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.589656 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-879c99469-n85w9\" (UID: \"5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9" Nov 21 15:46:14 crc kubenswrapper[4774]: 
I1121 15:46:14.633872 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-54wn4"] Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.635351 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-54wn4" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.640159 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-fzn6s" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.649151 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-647qt\" (UniqueName: \"kubernetes.io/projected/cda01f23-488b-459e-8ec4-f4825f188d16-kube-api-access-647qt\") pod \"observability-operator-d8bb48f5d-cv785\" (UID: \"cda01f23-488b-459e-8ec4-f4825f188d16\") " pod="openshift-operators/observability-operator-d8bb48f5d-cv785" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.649248 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/cda01f23-488b-459e-8ec4-f4825f188d16-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-cv785\" (UID: \"cda01f23-488b-459e-8ec4-f4825f188d16\") " pod="openshift-operators/observability-operator-d8bb48f5d-cv785" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.687192 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.687198 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.691689 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-54wn4"] Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.699658 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-647qt\" (UniqueName: \"kubernetes.io/projected/cda01f23-488b-459e-8ec4-f4825f188d16-kube-api-access-647qt\") pod \"observability-operator-d8bb48f5d-cv785\" (UID: \"cda01f23-488b-459e-8ec4-f4825f188d16\") " pod="openshift-operators/observability-operator-d8bb48f5d-cv785" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.700767 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/cda01f23-488b-459e-8ec4-f4825f188d16-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-cv785\" (UID: \"cda01f23-488b-459e-8ec4-f4825f188d16\") " pod="openshift-operators/observability-operator-d8bb48f5d-cv785" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.750890 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6bdn\" (UniqueName: \"kubernetes.io/projected/cf419f67-423f-4dcb-86e6-ad76fd3a9489-kube-api-access-s6bdn\") pod \"perses-operator-5446b9c989-54wn4\" (UID: \"cf419f67-423f-4dcb-86e6-ad76fd3a9489\") " pod="openshift-operators/perses-operator-5446b9c989-54wn4" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.750938 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/cf419f67-423f-4dcb-86e6-ad76fd3a9489-openshift-service-ca\") pod \"perses-operator-5446b9c989-54wn4\" (UID: \"cf419f67-423f-4dcb-86e6-ad76fd3a9489\") " pod="openshift-operators/perses-operator-5446b9c989-54wn4" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.849558 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-cv785" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.858241 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6bdn\" (UniqueName: \"kubernetes.io/projected/cf419f67-423f-4dcb-86e6-ad76fd3a9489-kube-api-access-s6bdn\") pod \"perses-operator-5446b9c989-54wn4\" (UID: \"cf419f67-423f-4dcb-86e6-ad76fd3a9489\") " pod="openshift-operators/perses-operator-5446b9c989-54wn4" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.858617 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/cf419f67-423f-4dcb-86e6-ad76fd3a9489-openshift-service-ca\") pod \"perses-operator-5446b9c989-54wn4\" (UID: \"cf419f67-423f-4dcb-86e6-ad76fd3a9489\") " pod="openshift-operators/perses-operator-5446b9c989-54wn4" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.860600 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/cf419f67-423f-4dcb-86e6-ad76fd3a9489-openshift-service-ca\") pod \"perses-operator-5446b9c989-54wn4\" (UID: \"cf419f67-423f-4dcb-86e6-ad76fd3a9489\") " pod="openshift-operators/perses-operator-5446b9c989-54wn4" Nov 21 15:46:14 crc kubenswrapper[4774]: I1121 15:46:14.926476 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6bdn\" (UniqueName: \"kubernetes.io/projected/cf419f67-423f-4dcb-86e6-ad76fd3a9489-kube-api-access-s6bdn\") pod \"perses-operator-5446b9c989-54wn4\" (UID: \"cf419f67-423f-4dcb-86e6-ad76fd3a9489\") " pod="openshift-operators/perses-operator-5446b9c989-54wn4" Nov 21 15:46:15 crc kubenswrapper[4774]: I1121 15:46:15.029094 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-54wn4" Nov 21 15:46:15 crc kubenswrapper[4774]: I1121 15:46:15.424263 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-std25"] Nov 21 15:46:15 crc kubenswrapper[4774]: I1121 15:46:15.667957 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc"] Nov 21 15:46:15 crc kubenswrapper[4774]: W1121 15:46:15.683769 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a13a74a_7bd0_4e11_9dec_402a38c7e984.slice/crio-005c0b84086ccfb3d70190e22189e3c9ba68cc25d17f43e43da077a8f55349ad WatchSource:0}: Error finding container 005c0b84086ccfb3d70190e22189e3c9ba68cc25d17f43e43da077a8f55349ad: Status 404 returned error can't find the container with id 005c0b84086ccfb3d70190e22189e3c9ba68cc25d17f43e43da077a8f55349ad Nov 21 15:46:15 crc kubenswrapper[4774]: I1121 15:46:15.831137 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-cv785"] Nov 21 15:46:15 crc kubenswrapper[4774]: I1121 15:46:15.841442 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-54wn4"] Nov 21 15:46:15 crc kubenswrapper[4774]: I1121 15:46:15.953361 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9"] Nov 21 15:46:15 crc kubenswrapper[4774]: W1121 15:46:15.960283 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5dbe1bcc_00ab_42f2_aa80_7e8fad97f9f8.slice/crio-71707417e1f1778d94ebd3bb129c2fc3bed19611d1688a60378707491b4ef8fb WatchSource:0}: Error finding container 71707417e1f1778d94ebd3bb129c2fc3bed19611d1688a60378707491b4ef8fb: Status 404 returned error can't find the container with id 71707417e1f1778d94ebd3bb129c2fc3bed19611d1688a60378707491b4ef8fb Nov 21 15:46:16 crc kubenswrapper[4774]: I1121 15:46:16.212802 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9" event={"ID":"5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8","Type":"ContainerStarted","Data":"71707417e1f1778d94ebd3bb129c2fc3bed19611d1688a60378707491b4ef8fb"} Nov 21 15:46:16 crc kubenswrapper[4774]: I1121 15:46:16.214467 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-cv785" event={"ID":"cda01f23-488b-459e-8ec4-f4825f188d16","Type":"ContainerStarted","Data":"ea51c0404ad7499e3240db051c60f3e94bcf4067ba6631c48803c9f4760ecebb"} Nov 21 15:46:16 crc kubenswrapper[4774]: I1121 15:46:16.215842 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-std25" event={"ID":"5ea09086-0f96-4641-ac00-84ad39559acc","Type":"ContainerStarted","Data":"c085427d6b04f5fabf8e8c0258d899229b2960c1ec6440e4a0676c718bff9815"} Nov 21 15:46:16 crc kubenswrapper[4774]: I1121 15:46:16.216975 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-54wn4" event={"ID":"cf419f67-423f-4dcb-86e6-ad76fd3a9489","Type":"ContainerStarted","Data":"ed16644d4b6a6a79170fc7deec2a848795f680ae55e2bacefc6be76eadceb412"} Nov 21 15:46:16 crc kubenswrapper[4774]: I1121 15:46:16.218872 4774 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc" event={"ID":"8a13a74a-7bd0-4e11-9dec-402a38c7e984","Type":"ContainerStarted","Data":"005c0b84086ccfb3d70190e22189e3c9ba68cc25d17f43e43da077a8f55349ad"} Nov 21 15:46:26 crc kubenswrapper[4774]: I1121 15:46:26.056535 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-s5c2v"] Nov 21 15:46:26 crc kubenswrapper[4774]: I1121 15:46:26.066187 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-xxwjm"] Nov 21 15:46:26 crc kubenswrapper[4774]: I1121 15:46:26.075755 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-s5c2v"] Nov 21 15:46:26 crc kubenswrapper[4774]: I1121 15:46:26.084743 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-xxwjm"] Nov 21 15:46:26 crc kubenswrapper[4774]: I1121 15:46:26.105448 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59557569-c6d3-4b74-96e0-75c02abea174" path="/var/lib/kubelet/pods/59557569-c6d3-4b74-96e0-75c02abea174/volumes" Nov 21 15:46:26 crc kubenswrapper[4774]: I1121 15:46:26.106236 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9a0830b-b047-4cc2-aaba-b448f08dc43a" path="/var/lib/kubelet/pods/b9a0830b-b047-4cc2-aaba-b448f08dc43a/volumes" Nov 21 15:46:26 crc kubenswrapper[4774]: I1121 15:46:26.106767 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-38dc-account-create-bbsjq"] Nov 21 15:46:26 crc kubenswrapper[4774]: I1121 15:46:26.106794 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-38dc-account-create-bbsjq"] Nov 21 15:46:26 crc kubenswrapper[4774]: I1121 15:46:26.115476 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-lpd59"] Nov 21 15:46:26 crc kubenswrapper[4774]: I1121 15:46:26.127114 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-lpd59"] Nov 21 15:46:27 crc kubenswrapper[4774]: I1121 15:46:27.033364 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-2e8b-account-create-wl72v"] Nov 21 15:46:27 crc kubenswrapper[4774]: I1121 15:46:27.041361 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-18af-account-create-n489c"] Nov 21 15:46:27 crc kubenswrapper[4774]: I1121 15:46:27.056068 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-2e8b-account-create-wl72v"] Nov 21 15:46:27 crc kubenswrapper[4774]: I1121 15:46:27.064582 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-18af-account-create-n489c"] Nov 21 15:46:28 crc kubenswrapper[4774]: I1121 15:46:28.107648 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c24e11c-ddbd-4804-acda-d04dd5e0e799" path="/var/lib/kubelet/pods/3c24e11c-ddbd-4804-acda-d04dd5e0e799/volumes" Nov 21 15:46:28 crc kubenswrapper[4774]: I1121 15:46:28.549252 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d2eab66-4529-4272-a749-f5cda51164e1" path="/var/lib/kubelet/pods/9d2eab66-4529-4272-a749-f5cda51164e1/volumes" Nov 21 15:46:28 crc kubenswrapper[4774]: I1121 15:46:28.554187 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a65ba851-c659-4aa1-9db7-716479598c2e" path="/var/lib/kubelet/pods/a65ba851-c659-4aa1-9db7-716479598c2e/volumes" Nov 21 15:46:28 crc 
kubenswrapper[4774]: I1121 15:46:28.554911 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda21d9c-81e1-41d8-b983-0e1e46b32bcc" path="/var/lib/kubelet/pods/fda21d9c-81e1-41d8-b983-0e1e46b32bcc/volumes" Nov 21 15:46:36 crc kubenswrapper[4774]: I1121 15:46:36.044436 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c2vlc"] Nov 21 15:46:36 crc kubenswrapper[4774]: I1121 15:46:36.059375 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-c2vlc"] Nov 21 15:46:36 crc kubenswrapper[4774]: I1121 15:46:36.106375 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef417cce-2b05-45db-86b0-0b8b907690b4" path="/var/lib/kubelet/pods/ef417cce-2b05-45db-86b0-0b8b907690b4/volumes" Nov 21 15:46:46 crc kubenswrapper[4774]: E1121 15:46:46.084642 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb" Nov 21 15:46:46 crc kubenswrapper[4774]: E1121 15:46:46.085596 4774 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb,Command:[],Args:[--namespace=$(NAMESPACE) --images=perses=$(RELATED_IMAGE_PERSES) --images=alertmanager=$(RELATED_IMAGE_ALERTMANAGER) --images=prometheus=$(RELATED_IMAGE_PROMETHEUS) --images=thanos=$(RELATED_IMAGE_THANOS) --images=ui-dashboards=$(RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN) --images=ui-distributed-tracing=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN) --images=ui-distributed-tracing-pf5=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5) --images=ui-distributed-tracing-pf4=$(RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4) --images=ui-logging=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN) --images=ui-logging-pf4=$(RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4) --images=ui-troubleshooting-panel=$(RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN) --images=ui-monitoring=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN) --images=ui-monitoring-pf5=$(RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5) --images=korrel8r=$(RELATED_IMAGE_KORREL8R) --images=health-analyzer=$(RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER) 
--openshift.enabled=true],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:RELATED_IMAGE_ALERTMANAGER,Value:registry.redhat.io/cluster-observability-operator/alertmanager-rhel9@sha256:e718854a7d6ca8accf0fa72db0eb902e46c44d747ad51dc3f06bba0cefaa3c01,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS,Value:registry.redhat.io/cluster-observability-operator/prometheus-rhel9@sha256:17ea20be390a94ab39f5cdd7f0cbc2498046eebcf77fe3dec9aa288d5c2cf46b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_THANOS,Value:registry.redhat.io/cluster-observability-operator/thanos-rhel9@sha256:d972f4faa5e9c121402d23ed85002f26af48ec36b1b71a7489d677b3913d08b4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PERSES,Value:registry.redhat.io/cluster-observability-operator/perses-rhel9@sha256:91531137fc1dcd740e277e0f65e120a0176a16f788c14c27925b61aa0b792ade,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DASHBOARDS_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/dashboards-console-plugin-rhel9@sha256:a69da8bbca8a28dd2925f864d51cc31cf761b10532c553095ba40b242ef701cb,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-rhel9@sha256:897e1bfad1187062725b54d87107bd0155972257a50d8335dd29e1999b828a4f,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf5-rhel9@sha256:95fe5b5746ca8c07ac9217ce2d8ac8e6afad17af210f9d8e0074df1310b209a8,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_DISTRIBUTED_TRACING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/distributed-tracing-console-plugin-pf4-rhel9@sha256:e9d9a89e4d8126a62b1852055482258ee528cac6398dd5d43ebad75ace0f33c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-rhel9@sha256:ec684a0645ceb917b019af7ddba68c3533416e356ab0d0320a30e75ca7ebb31b,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_LOGGING_PLUGIN_PF4,Value:registry.redhat.io/cluster-observability-operator/logging-console-plugin-pf4-rhel9@sha256:3b9693fcde9b3a9494fb04735b1f7cfd0426f10be820fdc3f024175c0d3df1c9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_TROUBLESHOOTING_PANEL_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/troubleshooting-panel-console-plugin-rhel9@sha256:580606f194180accc8abba099e17a26dca7522ec6d233fa2fdd40312771703e3,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-rhel9@sha256:e03777be39e71701935059cd877603874a13ac94daa73219d4e5e545599d78a9,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CONSOLE_MONITORING_PLUGIN_PF5,Value:registry.redhat.io/cluster-observability-operator/monitoring-console-plugin-pf5-rhel9@sha256:aa47256193cfd2877853878e1ae97d2ab8b8e5deae62b387cbfad02b284d379c,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KORREL8R,Value:registry.redhat.io/cluster-observability-operator/korrel8r-rhel9@sha256:c595ff56b2cb85514bf4784db6ddb82e4e657e3e708a7fb695fc4997379a94d4,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLUSTER_HEALTH_ANALYZER,Value:registry.redhat.io/cluster-observability-operator/cluster-health-analyzer-rhel9@sha256:45a4ec2a519bcec99e886aa91
596d5356a2414a2bd103baaef9fa7838c672eb2,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{400 -3} {} 400m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:observability-operator-tls,ReadOnly:true,MountPath:/etc/tls/private,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-647qt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000350000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod observability-operator-d8bb48f5d-cv785_openshift-operators(cda01f23-488b-459e-8ec4-f4825f188d16): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Nov 21 15:46:46 crc kubenswrapper[4774]: E1121 15:46:46.086932 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/observability-operator-d8bb48f5d-cv785" podUID="cda01f23-488b-459e-8ec4-f4825f188d16"
Nov 21 15:46:46 crc kubenswrapper[4774]: I1121 15:46:46.540040 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-54wn4" event={"ID":"cf419f67-423f-4dcb-86e6-ad76fd3a9489","Type":"ContainerStarted","Data":"3642d44a4bbcc3ada9804ce32fce942ee49708f39a582e75a3ba6a27ec066aca"}
Nov 21 15:46:46 crc kubenswrapper[4774]: I1121 15:46:46.540245 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-54wn4"
Nov 21 15:46:46 crc kubenswrapper[4774]: E1121 15:46:46.543489 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/cluster-observability-rhel9-operator@sha256:ce7d2904f7b238aa37dfe74a0b76bf73629e7a14fa52bf54b0ecf030ca36f1bb\\\"\"" pod="openshift-operators/observability-operator-d8bb48f5d-cv785" podUID="cda01f23-488b-459e-8ec4-f4825f188d16"
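The "Unhandled Error" above dumps the full &Container{} spec the kubelet tried to start before the image pull was cancelled; the subsequent ImagePullBackOff entry is the normal retry path, and the later ContainerStarted event for the same pod confirms the pull eventually succeeded. The dumped resource block decodes to a 400m CPU / 512Mi (536870912-byte) memory limit with a 100m / 256Mi request. A hedged reconstruction with the standard API types, illustrative rather than taken from the operator's actual manifest:

```go
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/api/resource"
)

func main() {
	// Values read out of the spec dump above: {{400 -3} ... 400m DecimalSI}
	// is 400 milli-CPU, and {{536870912 0} ... BinarySI} is 512Mi of memory.
	res := corev1.ResourceRequirements{
		Limits: corev1.ResourceList{
			corev1.ResourceCPU:    resource.MustParse("400m"),
			corev1.ResourceMemory: resource.MustParse("512Mi"),
		},
		Requests: corev1.ResourceList{
			corev1.ResourceCPU:    resource.MustParse("100m"),
			corev1.ResourceMemory: resource.MustParse("256Mi"),
		},
	}
	fmt.Println(res.Limits.Cpu(), res.Limits.Memory())
}
```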
pod="openshift-operators/observability-operator-d8bb48f5d-cv785" podUID="cda01f23-488b-459e-8ec4-f4825f188d16" Nov 21 15:46:46 crc kubenswrapper[4774]: I1121 15:46:46.569089 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-54wn4" podStartSLOduration=2.340809292 podStartE2EDuration="32.569056139s" podCreationTimestamp="2025-11-21 15:46:14 +0000 UTC" firstStartedPulling="2025-11-21 15:46:15.840036057 +0000 UTC m=+6166.492235316" lastFinishedPulling="2025-11-21 15:46:46.068282904 +0000 UTC m=+6196.720482163" observedRunningTime="2025-11-21 15:46:46.559295391 +0000 UTC m=+6197.211494650" watchObservedRunningTime="2025-11-21 15:46:46.569056139 +0000 UTC m=+6197.221255398" Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.053562 4774 scope.go:117] "RemoveContainer" containerID="c2b4ba0ca7b68d0812924bad644b73737282143d85f988987606b0d42a11e26a" Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.098866 4774 scope.go:117] "RemoveContainer" containerID="d799e91e7cedd65e12addf4ad46a078b0f63dfc609abfe23d6cfaadd513822f7" Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.121691 4774 scope.go:117] "RemoveContainer" containerID="51405d86b51a3a16331046fbd5fa8b0fbfeda7f04fd8b713594c37ba79d521bf" Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.175646 4774 scope.go:117] "RemoveContainer" containerID="62d827c975db7c505c288ac0928199bf72d5dede89475bc4a849187b7e66e913" Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.242203 4774 scope.go:117] "RemoveContainer" containerID="9fb334a0f35bdd6fc020180c71510b22124f3ea7fd0059877bdfa571067f6385" Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.269958 4774 scope.go:117] "RemoveContainer" containerID="8d96762923bd79d773d80ca706388011cb4ecf7f05241e5751b3db099fcf4b39" Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.324370 4774 scope.go:117] "RemoveContainer" containerID="2bf651a832617b45579b6e6855693da67ad0b4ba7bbcdebdd074b46dd9b0c2d0" Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.562209 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9" event={"ID":"5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8","Type":"ContainerStarted","Data":"7715427bd4e5639ac451be7027076862b9f11411e641efc963553987bcb0e6d6"} Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.565652 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc" event={"ID":"8a13a74a-7bd0-4e11-9dec-402a38c7e984","Type":"ContainerStarted","Data":"3a0d0e9a70553513d96814e95d844d93b2c50ac154c3fc0f63bc1b223c60ee79"} Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.568223 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-std25" event={"ID":"5ea09086-0f96-4641-ac00-84ad39559acc","Type":"ContainerStarted","Data":"6b00305f088a44ef43666f84ed4607f39fb6e6862c8bbbd56b5f6e88d12340bd"} Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.583805 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-n85w9" podStartSLOduration=3.478564986 podStartE2EDuration="33.583782145s" podCreationTimestamp="2025-11-21 15:46:14 +0000 UTC" firstStartedPulling="2025-11-21 15:46:15.963678892 +0000 UTC m=+6166.615878151" lastFinishedPulling="2025-11-21 15:46:46.068896051 +0000 UTC m=+6196.721095310" 
observedRunningTime="2025-11-21 15:46:47.582432377 +0000 UTC m=+6198.234631686" watchObservedRunningTime="2025-11-21 15:46:47.583782145 +0000 UTC m=+6198.235981414" Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.620355 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-std25" podStartSLOduration=2.996187535 podStartE2EDuration="33.620325657s" podCreationTimestamp="2025-11-21 15:46:14 +0000 UTC" firstStartedPulling="2025-11-21 15:46:15.440813177 +0000 UTC m=+6166.093012436" lastFinishedPulling="2025-11-21 15:46:46.064951299 +0000 UTC m=+6196.717150558" observedRunningTime="2025-11-21 15:46:47.607373288 +0000 UTC m=+6198.259572547" watchObservedRunningTime="2025-11-21 15:46:47.620325657 +0000 UTC m=+6198.272524916" Nov 21 15:46:47 crc kubenswrapper[4774]: I1121 15:46:47.645054 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-879c99469-plznc" podStartSLOduration=3.297113913 podStartE2EDuration="33.645026671s" podCreationTimestamp="2025-11-21 15:46:14 +0000 UTC" firstStartedPulling="2025-11-21 15:46:15.720453118 +0000 UTC m=+6166.372652377" lastFinishedPulling="2025-11-21 15:46:46.068365876 +0000 UTC m=+6196.720565135" observedRunningTime="2025-11-21 15:46:47.628994374 +0000 UTC m=+6198.281193643" watchObservedRunningTime="2025-11-21 15:46:47.645026671 +0000 UTC m=+6198.297225930" Nov 21 15:46:55 crc kubenswrapper[4774]: I1121 15:46:55.034270 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-54wn4" Nov 21 15:46:55 crc kubenswrapper[4774]: I1121 15:46:55.038933 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-z9dv6"] Nov 21 15:46:55 crc kubenswrapper[4774]: I1121 15:46:55.047237 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-z9dv6"] Nov 21 15:46:56 crc kubenswrapper[4774]: I1121 15:46:56.033827 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-qpm97"] Nov 21 15:46:56 crc kubenswrapper[4774]: I1121 15:46:56.044608 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-qpm97"] Nov 21 15:46:56 crc kubenswrapper[4774]: I1121 15:46:56.106382 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9911a5f9-8117-4f37-a08a-149593f09288" path="/var/lib/kubelet/pods/9911a5f9-8117-4f37-a08a-149593f09288/volumes" Nov 21 15:46:56 crc kubenswrapper[4774]: I1121 15:46:56.107002 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfe5a2bf-1a38-4433-bd7b-b25e9df160d7" path="/var/lib/kubelet/pods/cfe5a2bf-1a38-4433-bd7b-b25e9df160d7/volumes" Nov 21 15:47:03 crc kubenswrapper[4774]: I1121 15:47:03.830468 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-cv785" event={"ID":"cda01f23-488b-459e-8ec4-f4825f188d16","Type":"ContainerStarted","Data":"b959a3b22053d7158a600a54e62a3f72a5b058e5b105aabca8eb16b1aa2dca7f"} Nov 21 15:47:03 crc kubenswrapper[4774]: I1121 15:47:03.831739 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-cv785" Nov 21 15:47:03 crc kubenswrapper[4774]: I1121 15:47:03.834184 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
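The pod_startup_latency_tracker entries above encode a simple relationship: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that same span minus the image-pull window (lastFinishedPulling minus firstStartedPulling), so slow pulls do not count against the startup SLO. A throwaway check against the perses-operator numbers (not kubelet code; the m=+... monotonic offsets are ignored):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	layout := "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}
	// Timestamps copied from the perses-operator entry above.
	created := parse("2025-11-21 15:46:14 +0000 UTC")
	firstPull := parse("2025-11-21 15:46:15.840036057 +0000 UTC")
	lastPull := parse("2025-11-21 15:46:46.068282904 +0000 UTC")
	running := parse("2025-11-21 15:46:46.569056139 +0000 UTC") // watchObservedRunningTime

	e2e := running.Sub(created)          // podStartE2EDuration: 32.569056139s
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: 2.340809292s
	fmt.Println(e2e, slo)
}
```

The same arithmetic reproduces the observability-operator entry that follows: a 49.85s end-to-end start collapses to a 2.72s SLO duration once its 47.13s image pull is excluded.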
pod="openshift-operators/observability-operator-d8bb48f5d-cv785" Nov 21 15:47:03 crc kubenswrapper[4774]: I1121 15:47:03.853517 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-cv785" podStartSLOduration=2.719083676 podStartE2EDuration="49.853497235s" podCreationTimestamp="2025-11-21 15:46:14 +0000 UTC" firstStartedPulling="2025-11-21 15:46:15.865132093 +0000 UTC m=+6166.517331352" lastFinishedPulling="2025-11-21 15:47:02.999545652 +0000 UTC m=+6213.651744911" observedRunningTime="2025-11-21 15:47:03.849352097 +0000 UTC m=+6214.501551366" watchObservedRunningTime="2025-11-21 15:47:03.853497235 +0000 UTC m=+6214.505696494" Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.011511 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.012198 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/openstackclient" podUID="f0b7c953-e181-4c03-bcb8-2bed4066a3fe" containerName="openstackclient" containerID="cri-o://40ef2673c1ba379dcb49a1f7e004150828d3b6bab68310b2d4b6ab5553656cbe" gracePeriod=2 Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.027355 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.076880 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Nov 21 15:47:06 crc kubenswrapper[4774]: E1121 15:47:06.077389 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0b7c953-e181-4c03-bcb8-2bed4066a3fe" containerName="openstackclient" Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.077408 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0b7c953-e181-4c03-bcb8-2bed4066a3fe" containerName="openstackclient" Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.077591 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0b7c953-e181-4c03-bcb8-2bed4066a3fe" containerName="openstackclient" Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.078374 4774 util.go:30] "No sandbox for pod can be found. 
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.078374 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.098888 4774 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="f0b7c953-e181-4c03-bcb8-2bed4066a3fe" podUID="ad5e9f00-e24f-4ebe-b915-1330652111e6"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.128014 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.188103 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ad5e9f00-e24f-4ebe-b915-1330652111e6-openstack-config\") pod \"openstackclient\" (UID: \"ad5e9f00-e24f-4ebe-b915-1330652111e6\") " pod="openstack/openstackclient"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.188364 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ad5e9f00-e24f-4ebe-b915-1330652111e6-openstack-config-secret\") pod \"openstackclient\" (UID: \"ad5e9f00-e24f-4ebe-b915-1330652111e6\") " pod="openstack/openstackclient"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.188677 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5qn9\" (UniqueName: \"kubernetes.io/projected/ad5e9f00-e24f-4ebe-b915-1330652111e6-kube-api-access-r5qn9\") pod \"openstackclient\" (UID: \"ad5e9f00-e24f-4ebe-b915-1330652111e6\") " pod="openstack/openstackclient"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.290405 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.291788 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ad5e9f00-e24f-4ebe-b915-1330652111e6-openstack-config-secret\") pod \"openstackclient\" (UID: \"ad5e9f00-e24f-4ebe-b915-1330652111e6\") " pod="openstack/openstackclient"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.291910 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5qn9\" (UniqueName: \"kubernetes.io/projected/ad5e9f00-e24f-4ebe-b915-1330652111e6-kube-api-access-r5qn9\") pod \"openstackclient\" (UID: \"ad5e9f00-e24f-4ebe-b915-1330652111e6\") " pod="openstack/openstackclient"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.291801 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.292717 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ad5e9f00-e24f-4ebe-b915-1330652111e6-openstack-config\") pod \"openstackclient\" (UID: \"ad5e9f00-e24f-4ebe-b915-1330652111e6\") " pod="openstack/openstackclient"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.293999 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ad5e9f00-e24f-4ebe-b915-1330652111e6-openstack-config\") pod \"openstackclient\" (UID: \"ad5e9f00-e24f-4ebe-b915-1330652111e6\") " pod="openstack/openstackclient"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.295899 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-xndr4"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.301678 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ad5e9f00-e24f-4ebe-b915-1330652111e6-openstack-config-secret\") pod \"openstackclient\" (UID: \"ad5e9f00-e24f-4ebe-b915-1330652111e6\") " pod="openstack/openstackclient"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.343714 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5qn9\" (UniqueName: \"kubernetes.io/projected/ad5e9f00-e24f-4ebe-b915-1330652111e6-kube-api-access-r5qn9\") pod \"openstackclient\" (UID: \"ad5e9f00-e24f-4ebe-b915-1330652111e6\") " pod="openstack/openstackclient"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.347884 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.394351 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45scm\" (UniqueName: \"kubernetes.io/projected/9c160743-659f-4bb2-9bc5-2da61867bb84-kube-api-access-45scm\") pod \"kube-state-metrics-0\" (UID: \"9c160743-659f-4bb2-9bc5-2da61867bb84\") " pod="openstack/kube-state-metrics-0"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.440914 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.498309 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45scm\" (UniqueName: \"kubernetes.io/projected/9c160743-659f-4bb2-9bc5-2da61867bb84-kube-api-access-45scm\") pod \"kube-state-metrics-0\" (UID: \"9c160743-659f-4bb2-9bc5-2da61867bb84\") " pod="openstack/kube-state-metrics-0"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.584642 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45scm\" (UniqueName: \"kubernetes.io/projected/9c160743-659f-4bb2-9bc5-2da61867bb84-kube-api-access-45scm\") pod \"kube-state-metrics-0\" (UID: \"9c160743-659f-4bb2-9bc5-2da61867bb84\") " pod="openstack/kube-state-metrics-0"
Nov 21 15:47:06 crc kubenswrapper[4774]: I1121 15:47:06.748540 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.100806 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.109627 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.113135 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.113452 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.113638 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.113917 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-xddvm"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.122672 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.124401 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.132327 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/c15aed44-a7b6-416f-90bd-2a42764b1e68-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.132417 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/c15aed44-a7b6-416f-90bd-2a42764b1e68-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.132467 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/c15aed44-a7b6-416f-90bd-2a42764b1e68-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.132702 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c15aed44-a7b6-416f-90bd-2a42764b1e68-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.132826 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl7lb\" (UniqueName: \"kubernetes.io/projected/c15aed44-a7b6-416f-90bd-2a42764b1e68-kube-api-access-hl7lb\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.132955 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c15aed44-a7b6-416f-90bd-2a42764b1e68-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.133060 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c15aed44-a7b6-416f-90bd-2a42764b1e68-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.235135 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c15aed44-a7b6-416f-90bd-2a42764b1e68-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.235460 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/c15aed44-a7b6-416f-90bd-2a42764b1e68-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.235501 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/c15aed44-a7b6-416f-90bd-2a42764b1e68-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.236030 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/c15aed44-a7b6-416f-90bd-2a42764b1e68-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.236056 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/c15aed44-a7b6-416f-90bd-2a42764b1e68-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.236119 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c15aed44-a7b6-416f-90bd-2a42764b1e68-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.236163 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl7lb\" (UniqueName: \"kubernetes.io/projected/c15aed44-a7b6-416f-90bd-2a42764b1e68-kube-api-access-hl7lb\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.236199 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c15aed44-a7b6-416f-90bd-2a42764b1e68-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.250008 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/c15aed44-a7b6-416f-90bd-2a42764b1e68-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.252420 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c15aed44-a7b6-416f-90bd-2a42764b1e68-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.256219 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/c15aed44-a7b6-416f-90bd-2a42764b1e68-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.258645 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c15aed44-a7b6-416f-90bd-2a42764b1e68-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.261711 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c15aed44-a7b6-416f-90bd-2a42764b1e68-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.271723 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl7lb\" (UniqueName: \"kubernetes.io/projected/c15aed44-a7b6-416f-90bd-2a42764b1e68-kube-api-access-hl7lb\") pod \"alertmanager-metric-storage-0\" (UID: \"c15aed44-a7b6-416f-90bd-2a42764b1e68\") " pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.401097 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Nov 21 15:47:07 crc kubenswrapper[4774]: W1121 15:47:07.426737 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad5e9f00_e24f_4ebe_b915_1330652111e6.slice/crio-113d69ffdf26b28edb8d56235922ad3a8f765a4920b75a13b0f27fc3fb20ec0e WatchSource:0}: Error finding container 113d69ffdf26b28edb8d56235922ad3a8f765a4920b75a13b0f27fc3fb20ec0e: Status 404 returned error can't find the container with id 113d69ffdf26b28edb8d56235922ad3a8f765a4920b75a13b0f27fc3fb20ec0e
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.507519 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.570225 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.599380 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.602334 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.607519 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.607769 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.608044 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-sggl6"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.608281 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.609146 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.621151 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.658648 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.735018 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6fw4\" (UniqueName: \"kubernetes.io/projected/7b305f49-43b7-43d8-972d-d07ace53858f-kube-api-access-f6fw4\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.735088 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/7b305f49-43b7-43d8-972d-d07ace53858f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.735158 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/7b305f49-43b7-43d8-972d-d07ace53858f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.735284 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/7b305f49-43b7-43d8-972d-d07ace53858f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.735347 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7b305f49-43b7-43d8-972d-d07ace53858f-config\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.735389 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/7b305f49-43b7-43d8-972d-d07ace53858f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.735418 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d1f263ac-9008-4657-a43b-8032570a1fa2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d1f263ac-9008-4657-a43b-8032570a1fa2\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.735444 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/7b305f49-43b7-43d8-972d-d07ace53858f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.837257 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7b305f49-43b7-43d8-972d-d07ace53858f-config\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.837588 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d1f263ac-9008-4657-a43b-8032570a1fa2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d1f263ac-9008-4657-a43b-8032570a1fa2\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.837609 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/7b305f49-43b7-43d8-972d-d07ace53858f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.837627 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/7b305f49-43b7-43d8-972d-d07ace53858f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.837667 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6fw4\" (UniqueName: \"kubernetes.io/projected/7b305f49-43b7-43d8-972d-d07ace53858f-kube-api-access-f6fw4\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.837694 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/7b305f49-43b7-43d8-972d-d07ace53858f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.837727 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/7b305f49-43b7-43d8-972d-d07ace53858f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.837812 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/7b305f49-43b7-43d8-972d-d07ace53858f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.846268 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/7b305f49-43b7-43d8-972d-d07ace53858f-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.848400 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/7b305f49-43b7-43d8-972d-d07ace53858f-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.849642 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/7b305f49-43b7-43d8-972d-d07ace53858f-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.852334 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/7b305f49-43b7-43d8-972d-d07ace53858f-config\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.854345 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/7b305f49-43b7-43d8-972d-d07ace53858f-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.863435 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/7b305f49-43b7-43d8-972d-d07ace53858f-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.881067 4774 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.881120 4774 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d1f263ac-9008-4657-a43b-8032570a1fa2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d1f263ac-9008-4657-a43b-8032570a1fa2\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/eceadd2d445bbac45e9f9bc82fb81235fa2ef6cead813d12edd255cb50edbc88/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.891731 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6fw4\" (UniqueName: \"kubernetes.io/projected/7b305f49-43b7-43d8-972d-d07ace53858f-kube-api-access-f6fw4\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.936865 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9c160743-659f-4bb2-9bc5-2da61867bb84","Type":"ContainerStarted","Data":"f946f63a8ca854c864c87f7b700e93490ef4db89c4627c44d1fd3bcfd95bc060"}
Nov 21 15:47:07 crc kubenswrapper[4774]: I1121 15:47:07.954008 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ad5e9f00-e24f-4ebe-b915-1330652111e6","Type":"ContainerStarted","Data":"113d69ffdf26b28edb8d56235922ad3a8f765a4920b75a13b0f27fc3fb20ec0e"}
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.155496 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d1f263ac-9008-4657-a43b-8032570a1fa2\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d1f263ac-9008-4657-a43b-8032570a1fa2\") pod \"prometheus-metric-storage-0\" (UID: \"7b305f49-43b7-43d8-972d-d07ace53858f\") " pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.358369 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.435894 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.793800 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.870013 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-openstack-config-secret\") pod \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\" (UID: \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\") "
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.870210 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5j4ln\" (UniqueName: \"kubernetes.io/projected/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-kube-api-access-5j4ln\") pod \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\" (UID: \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\") "
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.870320 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-openstack-config\") pod \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\" (UID: \"f0b7c953-e181-4c03-bcb8-2bed4066a3fe\") "
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.879508 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-kube-api-access-5j4ln" (OuterVolumeSpecName: "kube-api-access-5j4ln") pod "f0b7c953-e181-4c03-bcb8-2bed4066a3fe" (UID: "f0b7c953-e181-4c03-bcb8-2bed4066a3fe"). InnerVolumeSpecName "kube-api-access-5j4ln". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.918881 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "f0b7c953-e181-4c03-bcb8-2bed4066a3fe" (UID: "f0b7c953-e181-4c03-bcb8-2bed4066a3fe"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.965236 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "f0b7c953-e181-4c03-bcb8-2bed4066a3fe" (UID: "f0b7c953-e181-4c03-bcb8-2bed4066a3fe"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.966953 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"9c160743-659f-4bb2-9bc5-2da61867bb84","Type":"ContainerStarted","Data":"aea9e13b46dd39188aae5f646a56ca19bfbc0507a5a41ffcd909e481266101fd"}
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.967875 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.973048 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5j4ln\" (UniqueName: \"kubernetes.io/projected/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-kube-api-access-5j4ln\") on node \"crc\" DevicePath \"\""
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.973085 4774 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-openstack-config\") on node \"crc\" DevicePath \"\""
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.973098 4774 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f0b7c953-e181-4c03-bcb8-2bed4066a3fe-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.975804 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ad5e9f00-e24f-4ebe-b915-1330652111e6","Type":"ContainerStarted","Data":"e021f978158a1ff28e42f70d8f8f445182908e207e8327d4387b6c3e8b44c77b"}
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.979952 4774 generic.go:334] "Generic (PLEG): container finished" podID="f0b7c953-e181-4c03-bcb8-2bed4066a3fe" containerID="40ef2673c1ba379dcb49a1f7e004150828d3b6bab68310b2d4b6ab5553656cbe" exitCode=137
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.980020 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.980057 4774 scope.go:117] "RemoveContainer" containerID="40ef2673c1ba379dcb49a1f7e004150828d3b6bab68310b2d4b6ab5553656cbe"
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.982337 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"c15aed44-a7b6-416f-90bd-2a42764b1e68","Type":"ContainerStarted","Data":"0a83589f38030d32413c838c3735d959fcb0b2f0cf372986a3b32c5d1f7ea14f"}
Nov 21 15:47:08 crc kubenswrapper[4774]: I1121 15:47:08.992211 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.463382776 podStartE2EDuration="2.992188711s" podCreationTimestamp="2025-11-21 15:47:06 +0000 UTC" firstStartedPulling="2025-11-21 15:47:07.600664893 +0000 UTC m=+6218.252864152" lastFinishedPulling="2025-11-21 15:47:08.129470828 +0000 UTC m=+6218.781670087" observedRunningTime="2025-11-21 15:47:08.982234577 +0000 UTC m=+6219.634433836" watchObservedRunningTime="2025-11-21 15:47:08.992188711 +0000 UTC m=+6219.644387970"
Nov 21 15:47:09 crc kubenswrapper[4774]: I1121 15:47:09.006762 4774 scope.go:117] "RemoveContainer" containerID="40ef2673c1ba379dcb49a1f7e004150828d3b6bab68310b2d4b6ab5553656cbe"
Nov 21 15:47:09 crc kubenswrapper[4774]: I1121 15:47:09.007153 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.007130927 podStartE2EDuration="3.007130927s" podCreationTimestamp="2025-11-21 15:47:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:47:09.003350409 +0000 UTC m=+6219.655549658" watchObservedRunningTime="2025-11-21 15:47:09.007130927 +0000 UTC m=+6219.659330176"
Nov 21 15:47:09 crc kubenswrapper[4774]: I1121 15:47:09.008298 4774 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="f0b7c953-e181-4c03-bcb8-2bed4066a3fe" podUID="ad5e9f00-e24f-4ebe-b915-1330652111e6"
Nov 21 15:47:09 crc kubenswrapper[4774]: E1121 15:47:09.009863 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40ef2673c1ba379dcb49a1f7e004150828d3b6bab68310b2d4b6ab5553656cbe\": container with ID starting with 40ef2673c1ba379dcb49a1f7e004150828d3b6bab68310b2d4b6ab5553656cbe not found: ID does not exist" containerID="40ef2673c1ba379dcb49a1f7e004150828d3b6bab68310b2d4b6ab5553656cbe"
Nov 21 15:47:09 crc kubenswrapper[4774]: I1121 15:47:09.009906 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40ef2673c1ba379dcb49a1f7e004150828d3b6bab68310b2d4b6ab5553656cbe"} err="failed to get container status \"40ef2673c1ba379dcb49a1f7e004150828d3b6bab68310b2d4b6ab5553656cbe\": rpc error: code = NotFound desc = could not find container \"40ef2673c1ba379dcb49a1f7e004150828d3b6bab68310b2d4b6ab5553656cbe\": container with ID starting with 40ef2673c1ba379dcb49a1f7e004150828d3b6bab68310b2d4b6ab5553656cbe not found: ID does not exist"
Nov 21 15:47:09 crc kubenswrapper[4774]: I1121 15:47:09.137592 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
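The exitCode=137 above is the usual "128 + signal" wait-status encoding: the openstackclient container outlived its gracePeriod=2 and was SIGKILLed (128 + 9 = 137). The NotFound errors that follow are benign, since the first RemoveContainer already deleted it. A tiny decoding helper to illustrate the convention (my own sketch, not kubelet code):

    package main

    import (
        "fmt"
        "syscall"
    )

    // describeExit decodes container exit codes: values above 128 mean the
    // process was killed by signal (code - 128).
    func describeExit(code int) string {
        if code > 128 {
            sig := syscall.Signal(code - 128)
            return fmt.Sprintf("killed by signal %d (%v)", code-128, sig)
        }
        return fmt.Sprintf("exited normally with status %d", code)
    }

    func main() {
        fmt.Println(describeExit(137)) // killed by signal 9 (killed)
        fmt.Println(describeExit(0))   // exited normally with status 0
    }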
Nov 21 15:47:09 crc kubenswrapper[4774]: W1121 15:47:09.141667 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b305f49_43b7_43d8_972d_d07ace53858f.slice/crio-22c77055e6c84492a7b782b7ca118017827fe9ec6e0a70c44f8fc0aed75e18a3 WatchSource:0}: Error finding container 22c77055e6c84492a7b782b7ca118017827fe9ec6e0a70c44f8fc0aed75e18a3: Status 404 returned error can't find the container with id 22c77055e6c84492a7b782b7ca118017827fe9ec6e0a70c44f8fc0aed75e18a3
Nov 21 15:47:09 crc kubenswrapper[4774]: I1121 15:47:09.994472 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"7b305f49-43b7-43d8-972d-d07ace53858f","Type":"ContainerStarted","Data":"22c77055e6c84492a7b782b7ca118017827fe9ec6e0a70c44f8fc0aed75e18a3"}
Nov 21 15:47:10 crc kubenswrapper[4774]: I1121 15:47:10.111105 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0b7c953-e181-4c03-bcb8-2bed4066a3fe" path="/var/lib/kubelet/pods/f0b7c953-e181-4c03-bcb8-2bed4066a3fe/volumes"
Nov 21 15:47:14 crc kubenswrapper[4774]: I1121 15:47:14.029220 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-nvdbj"]
Nov 21 15:47:14 crc kubenswrapper[4774]: I1121 15:47:14.030511 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"7b305f49-43b7-43d8-972d-d07ace53858f","Type":"ContainerStarted","Data":"602353b2279a3f054b3d88cfc9a321aaab66d7a89d417eb0b524f9851c4e65bf"}
Nov 21 15:47:14 crc kubenswrapper[4774]: I1121 15:47:14.038080 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-nvdbj"]
Nov 21 15:47:14 crc kubenswrapper[4774]: I1121 15:47:14.103746 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa" path="/var/lib/kubelet/pods/e65de5ed-1d20-4d17-a53d-7ed8ad06d0aa/volumes"
Nov 21 15:47:15 crc kubenswrapper[4774]: I1121 15:47:15.040239 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"c15aed44-a7b6-416f-90bd-2a42764b1e68","Type":"ContainerStarted","Data":"a1b5b4521e8de149e8ea6305c15559ab91e66ef823048175b42fde59818f60db"}
Nov 21 15:47:16 crc kubenswrapper[4774]: I1121 15:47:16.753230 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Nov 21 15:47:20 crc kubenswrapper[4774]: I1121 15:47:20.091858 4774 generic.go:334] "Generic (PLEG): container finished" podID="7b305f49-43b7-43d8-972d-d07ace53858f" containerID="602353b2279a3f054b3d88cfc9a321aaab66d7a89d417eb0b524f9851c4e65bf" exitCode=0
Nov 21 15:47:20 crc kubenswrapper[4774]: I1121 15:47:20.091913 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"7b305f49-43b7-43d8-972d-d07ace53858f","Type":"ContainerDied","Data":"602353b2279a3f054b3d88cfc9a321aaab66d7a89d417eb0b524f9851c4e65bf"}
Nov 21 15:47:21 crc kubenswrapper[4774]: I1121 15:47:21.103228 4774 generic.go:334] "Generic (PLEG): container finished" podID="c15aed44-a7b6-416f-90bd-2a42764b1e68" containerID="a1b5b4521e8de149e8ea6305c15559ab91e66ef823048175b42fde59818f60db" exitCode=0
Nov 21 15:47:21 crc kubenswrapper[4774]: I1121 15:47:21.103328 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"c15aed44-a7b6-416f-90bd-2a42764b1e68","Type":"ContainerDied","Data":"a1b5b4521e8de149e8ea6305c15559ab91e66ef823048175b42fde59818f60db"}
Nov 21 15:47:27 crc kubenswrapper[4774]: I1121 15:47:27.171319 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"c15aed44-a7b6-416f-90bd-2a42764b1e68","Type":"ContainerStarted","Data":"043f4bcf9bff2775b668158997b6e4520e6b460b6dcb60bd9d88d46ecb74ce4d"}
Nov 21 15:47:27 crc kubenswrapper[4774]: I1121 15:47:27.174855 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"7b305f49-43b7-43d8-972d-d07ace53858f","Type":"ContainerStarted","Data":"f01d3ccfe64f45b27757a969a5f42b684fa9bcc4c0fa26f86236ea2bdf3d7a71"}
Nov 21 15:47:30 crc kubenswrapper[4774]: I1121 15:47:30.211073 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"c15aed44-a7b6-416f-90bd-2a42764b1e68","Type":"ContainerStarted","Data":"2a27ae652871cb1f4289980f8495efbcce9dadcbd1224183323158844f5296b9"}
Nov 21 15:47:30 crc kubenswrapper[4774]: I1121 15:47:30.212174 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:30 crc kubenswrapper[4774]: I1121 15:47:30.213745 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"7b305f49-43b7-43d8-972d-d07ace53858f","Type":"ContainerStarted","Data":"36eb5f7b4f96cbb44f2d63e196e62a85e648895cc3ebbab4c5bc6866efacf76d"}
Nov 21 15:47:30 crc kubenswrapper[4774]: I1121 15:47:30.214768 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0"
Nov 21 15:47:30 crc kubenswrapper[4774]: I1121 15:47:30.236674 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=5.592263276 podStartE2EDuration="23.236656152s" podCreationTimestamp="2025-11-21 15:47:07 +0000 UTC" firstStartedPulling="2025-11-21 15:47:08.472058494 +0000 UTC m=+6219.124257753" lastFinishedPulling="2025-11-21 15:47:26.11645136 +0000 UTC m=+6236.768650629" observedRunningTime="2025-11-21 15:47:30.236545979 +0000 UTC m=+6240.888745248" watchObservedRunningTime="2025-11-21 15:47:30.236656152 +0000 UTC m=+6240.888855411"
Nov 21 15:47:35 crc kubenswrapper[4774]: I1121 15:47:35.271863 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"7b305f49-43b7-43d8-972d-d07ace53858f","Type":"ContainerStarted","Data":"7d3af30c3732c57023603d13993808e94ff93e53fc6d2251d349843a8eef68d9"}
Nov 21 15:47:35 crc kubenswrapper[4774]: I1121 15:47:35.301932 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=4.34692608 podStartE2EDuration="29.301917315s" podCreationTimestamp="2025-11-21 15:47:06 +0000 UTC" firstStartedPulling="2025-11-21 15:47:09.144454721 +0000 UTC m=+6219.796653980" lastFinishedPulling="2025-11-21 15:47:34.099445956 +0000 UTC m=+6244.751645215" observedRunningTime="2025-11-21 15:47:35.30035228 +0000 UTC m=+6245.952551539" watchObservedRunningTime="2025-11-21 15:47:35.301917315 +0000 UTC m=+6245.954116574"
Nov 21 15:47:38 crc kubenswrapper[4774]: I1121 15:47:38.359277 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:38 crc kubenswrapper[4774]: I1121 15:47:38.359933 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0"
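The prometheus-metric-storage-0 probe entries above show the startup probe reporting unhealthy and the readiness status still empty; only once the startup probe succeeds (next entry) does readiness become meaningful and, shortly after, ready. A hedged sketch of that gating order (invented types; kubelet's prober manager is far richer):

    package main

    import "fmt"

    // worker holds the per-container probe state: until the startup probe
    // has succeeded once, other probe results are held back. Illustrative.
    type worker struct{ started bool }

    func (w *worker) probe(kind string, ok bool) string {
        if kind != "startup" && !w.started {
            return "skipped (startup probe not yet successful)"
        }
        if kind == "startup" && ok {
            w.started = true
            return "started"
        }
        if ok {
            return "ready"
        }
        return "unhealthy"
    }

    func main() {
        w := &worker{}
        fmt.Println(w.probe("startup", false))  // unhealthy
        fmt.Println(w.probe("readiness", true)) // skipped (startup probe not yet successful)
        fmt.Println(w.probe("startup", true))   // started
        fmt.Println(w.probe("readiness", true)) // ready
    }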
Nov 21 15:47:38 crc kubenswrapper[4774]: I1121 15:47:38.363040 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:39 crc kubenswrapper[4774]: I1121 15:47:39.316030 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.297143 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.301375 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.305585 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.305620 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.306326 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.390341 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqvs9\" (UniqueName: \"kubernetes.io/projected/9e3707c7-d51e-4671-9252-aec6c00606d3-kube-api-access-hqvs9\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.390406 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-config-data\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.390489 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.390555 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e3707c7-d51e-4671-9252-aec6c00606d3-log-httpd\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.390632 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e3707c7-d51e-4671-9252-aec6c00606d3-run-httpd\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.390661 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-scripts\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.390678 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.492207 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqvs9\" (UniqueName: \"kubernetes.io/projected/9e3707c7-d51e-4671-9252-aec6c00606d3-kube-api-access-hqvs9\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.492703 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-config-data\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.492870 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.493024 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e3707c7-d51e-4671-9252-aec6c00606d3-log-httpd\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.493175 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e3707c7-d51e-4671-9252-aec6c00606d3-run-httpd\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.493296 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-scripts\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.493370 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.493479 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e3707c7-d51e-4671-9252-aec6c00606d3-log-httpd\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.494194 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e3707c7-d51e-4671-9252-aec6c00606d3-run-httpd\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.499355 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.500466 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.506701 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-config-data\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.507362 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqvs9\" (UniqueName: \"kubernetes.io/projected/9e3707c7-d51e-4671-9252-aec6c00606d3-kube-api-access-hqvs9\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.516524 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-scripts\") pod \"ceilometer-0\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " pod="openstack/ceilometer-0"
Nov 21 15:47:40 crc kubenswrapper[4774]: I1121 15:47:40.628352 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 21 15:47:41 crc kubenswrapper[4774]: I1121 15:47:41.138037 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 21 15:47:41 crc kubenswrapper[4774]: I1121 15:47:41.335490 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e3707c7-d51e-4671-9252-aec6c00606d3","Type":"ContainerStarted","Data":"86d590f9e0b6803b5b4cc694cfd969937bbb173121f4cca97487d48a7759419c"}
Nov 21 15:47:42 crc kubenswrapper[4774]: I1121 15:47:42.346239 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e3707c7-d51e-4671-9252-aec6c00606d3","Type":"ContainerStarted","Data":"9c2e644cbd715359bafc3823c7b9b2fbb0b7bef4ad3171ace0488dff8b7b79d5"}
Nov 21 15:47:43 crc kubenswrapper[4774]: I1121 15:47:43.360056 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e3707c7-d51e-4671-9252-aec6c00606d3","Type":"ContainerStarted","Data":"0da7c20e891f5e447ffb2d913e87ee88e92dbaeaaf4f11a3edfe2167f63b45d6"}
Nov 21 15:47:44 crc kubenswrapper[4774]: I1121 15:47:44.371373 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e3707c7-d51e-4671-9252-aec6c00606d3","Type":"ContainerStarted","Data":"ca45d8a1be0e3e90a6c0496e353d3fb88387b513ca325a4387ebc746bef4b21a"}
Nov 21 15:47:45 crc kubenswrapper[4774]: I1121 15:47:45.381407 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e3707c7-d51e-4671-9252-aec6c00606d3","Type":"ContainerStarted","Data":"996299051b2f8238923c0f4af15a79d515fc15425ea65770f188ecffa72ce1a0"}
Nov 21 15:47:45 crc kubenswrapper[4774]: I1121 15:47:45.381663 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
15:47:45 crc kubenswrapper[4774]: I1121 15:47:45.405143 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.8161477110000002 podStartE2EDuration="5.40512377s" podCreationTimestamp="2025-11-21 15:47:40 +0000 UTC" firstStartedPulling="2025-11-21 15:47:41.142193959 +0000 UTC m=+6251.794393208" lastFinishedPulling="2025-11-21 15:47:44.731170008 +0000 UTC m=+6255.383369267" observedRunningTime="2025-11-21 15:47:45.401278511 +0000 UTC m=+6256.053477780" watchObservedRunningTime="2025-11-21 15:47:45.40512377 +0000 UTC m=+6256.057323029" Nov 21 15:47:47 crc kubenswrapper[4774]: I1121 15:47:47.559606 4774 scope.go:117] "RemoveContainer" containerID="56d55397fb790979133be00a350a9eb897a08b90a68e2febedcead8a0484339e" Nov 21 15:47:47 crc kubenswrapper[4774]: I1121 15:47:47.606396 4774 scope.go:117] "RemoveContainer" containerID="b0d06f7e9eeba2e0a87ee1156fe2e759d6835c7b17a5b532de61ab99dce6d232" Nov 21 15:47:47 crc kubenswrapper[4774]: I1121 15:47:47.676218 4774 scope.go:117] "RemoveContainer" containerID="014f44fa639be6bf4c672cad96a8b59b40e79b0559f7ff0c83508e29381b64b9" Nov 21 15:47:47 crc kubenswrapper[4774]: I1121 15:47:47.718705 4774 scope.go:117] "RemoveContainer" containerID="e721c766a96ccb94f7e98eb9fccb4ed527e375ba1766fdacf8c73602a9193555" Nov 21 15:47:51 crc kubenswrapper[4774]: I1121 15:47:51.921257 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-create-9sr4x"] Nov 21 15:47:51 crc kubenswrapper[4774]: I1121 15:47:51.923292 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-9sr4x" Nov 21 15:47:51 crc kubenswrapper[4774]: I1121 15:47:51.934480 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-9sr4x"] Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.115339 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb53ff10-9a77-4bac-ba20-1c93bb97973b-operator-scripts\") pod \"aodh-db-create-9sr4x\" (UID: \"fb53ff10-9a77-4bac-ba20-1c93bb97973b\") " pod="openstack/aodh-db-create-9sr4x" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.115475 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ldc4\" (UniqueName: \"kubernetes.io/projected/fb53ff10-9a77-4bac-ba20-1c93bb97973b-kube-api-access-8ldc4\") pod \"aodh-db-create-9sr4x\" (UID: \"fb53ff10-9a77-4bac-ba20-1c93bb97973b\") " pod="openstack/aodh-db-create-9sr4x" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.129844 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-4f3c-account-create-f99kd"] Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.133695 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-4f3c-account-create-f99kd" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.135525 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-db-secret" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.143111 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-4f3c-account-create-f99kd"] Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.217260 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb53ff10-9a77-4bac-ba20-1c93bb97973b-operator-scripts\") pod \"aodh-db-create-9sr4x\" (UID: \"fb53ff10-9a77-4bac-ba20-1c93bb97973b\") " pod="openstack/aodh-db-create-9sr4x" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.217438 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ldc4\" (UniqueName: \"kubernetes.io/projected/fb53ff10-9a77-4bac-ba20-1c93bb97973b-kube-api-access-8ldc4\") pod \"aodh-db-create-9sr4x\" (UID: \"fb53ff10-9a77-4bac-ba20-1c93bb97973b\") " pod="openstack/aodh-db-create-9sr4x" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.220758 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb53ff10-9a77-4bac-ba20-1c93bb97973b-operator-scripts\") pod \"aodh-db-create-9sr4x\" (UID: \"fb53ff10-9a77-4bac-ba20-1c93bb97973b\") " pod="openstack/aodh-db-create-9sr4x" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.241073 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ldc4\" (UniqueName: \"kubernetes.io/projected/fb53ff10-9a77-4bac-ba20-1c93bb97973b-kube-api-access-8ldc4\") pod \"aodh-db-create-9sr4x\" (UID: \"fb53ff10-9a77-4bac-ba20-1c93bb97973b\") " pod="openstack/aodh-db-create-9sr4x" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.241855 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-9sr4x" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.323985 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab64234b-0075-4e20-9b12-f7282f5a966e-operator-scripts\") pod \"aodh-4f3c-account-create-f99kd\" (UID: \"ab64234b-0075-4e20-9b12-f7282f5a966e\") " pod="openstack/aodh-4f3c-account-create-f99kd" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.324122 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q76vp\" (UniqueName: \"kubernetes.io/projected/ab64234b-0075-4e20-9b12-f7282f5a966e-kube-api-access-q76vp\") pod \"aodh-4f3c-account-create-f99kd\" (UID: \"ab64234b-0075-4e20-9b12-f7282f5a966e\") " pod="openstack/aodh-4f3c-account-create-f99kd" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.426104 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab64234b-0075-4e20-9b12-f7282f5a966e-operator-scripts\") pod \"aodh-4f3c-account-create-f99kd\" (UID: \"ab64234b-0075-4e20-9b12-f7282f5a966e\") " pod="openstack/aodh-4f3c-account-create-f99kd" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.426506 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q76vp\" (UniqueName: \"kubernetes.io/projected/ab64234b-0075-4e20-9b12-f7282f5a966e-kube-api-access-q76vp\") pod \"aodh-4f3c-account-create-f99kd\" (UID: \"ab64234b-0075-4e20-9b12-f7282f5a966e\") " pod="openstack/aodh-4f3c-account-create-f99kd" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.427354 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab64234b-0075-4e20-9b12-f7282f5a966e-operator-scripts\") pod \"aodh-4f3c-account-create-f99kd\" (UID: \"ab64234b-0075-4e20-9b12-f7282f5a966e\") " pod="openstack/aodh-4f3c-account-create-f99kd" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.454403 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q76vp\" (UniqueName: \"kubernetes.io/projected/ab64234b-0075-4e20-9b12-f7282f5a966e-kube-api-access-q76vp\") pod \"aodh-4f3c-account-create-f99kd\" (UID: \"ab64234b-0075-4e20-9b12-f7282f5a966e\") " pod="openstack/aodh-4f3c-account-create-f99kd" Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.742241 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-create-9sr4x"] Nov 21 15:47:52 crc kubenswrapper[4774]: I1121 15:47:52.751768 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-4f3c-account-create-f99kd" Nov 21 15:47:53 crc kubenswrapper[4774]: I1121 15:47:53.256400 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-4f3c-account-create-f99kd"] Nov 21 15:47:53 crc kubenswrapper[4774]: W1121 15:47:53.273848 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab64234b_0075_4e20_9b12_f7282f5a966e.slice/crio-c14acdd1193afc2e6d00ded5671651d37ad76fff8da6221f6803bc4b7affe65a WatchSource:0}: Error finding container c14acdd1193afc2e6d00ded5671651d37ad76fff8da6221f6803bc4b7affe65a: Status 404 returned error can't find the container with id c14acdd1193afc2e6d00ded5671651d37ad76fff8da6221f6803bc4b7affe65a Nov 21 15:47:53 crc kubenswrapper[4774]: I1121 15:47:53.469781 4774 generic.go:334] "Generic (PLEG): container finished" podID="fb53ff10-9a77-4bac-ba20-1c93bb97973b" containerID="499198ccabeadbbfa0cccad0b00e94d47cf4e7d5f05f1dc15a0e780b1390e209" exitCode=0 Nov 21 15:47:53 crc kubenswrapper[4774]: I1121 15:47:53.469858 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-9sr4x" event={"ID":"fb53ff10-9a77-4bac-ba20-1c93bb97973b","Type":"ContainerDied","Data":"499198ccabeadbbfa0cccad0b00e94d47cf4e7d5f05f1dc15a0e780b1390e209"} Nov 21 15:47:53 crc kubenswrapper[4774]: I1121 15:47:53.470149 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-9sr4x" event={"ID":"fb53ff10-9a77-4bac-ba20-1c93bb97973b","Type":"ContainerStarted","Data":"bdb5428746a3f08495b525f0012947e7e7e2a7f9aabe0f454ed6d8359382b960"} Nov 21 15:47:53 crc kubenswrapper[4774]: I1121 15:47:53.471996 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-4f3c-account-create-f99kd" event={"ID":"ab64234b-0075-4e20-9b12-f7282f5a966e","Type":"ContainerStarted","Data":"c14acdd1193afc2e6d00ded5671651d37ad76fff8da6221f6803bc4b7affe65a"} Nov 21 15:47:54 crc kubenswrapper[4774]: I1121 15:47:54.483698 4774 generic.go:334] "Generic (PLEG): container finished" podID="ab64234b-0075-4e20-9b12-f7282f5a966e" containerID="62d05e60bbe92a165f70f4fa6e83f3e2e6365eb1eaef820f650fc05f9ba0fdb1" exitCode=0 Nov 21 15:47:54 crc kubenswrapper[4774]: I1121 15:47:54.483805 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-4f3c-account-create-f99kd" event={"ID":"ab64234b-0075-4e20-9b12-f7282f5a966e","Type":"ContainerDied","Data":"62d05e60bbe92a165f70f4fa6e83f3e2e6365eb1eaef820f650fc05f9ba0fdb1"} Nov 21 15:47:54 crc kubenswrapper[4774]: I1121 15:47:54.900863 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-create-9sr4x" Nov 21 15:47:55 crc kubenswrapper[4774]: I1121 15:47:55.040641 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-sm9vk"] Nov 21 15:47:55 crc kubenswrapper[4774]: I1121 15:47:55.052404 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-sm9vk"] Nov 21 15:47:55 crc kubenswrapper[4774]: I1121 15:47:55.100783 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb53ff10-9a77-4bac-ba20-1c93bb97973b-operator-scripts\") pod \"fb53ff10-9a77-4bac-ba20-1c93bb97973b\" (UID: \"fb53ff10-9a77-4bac-ba20-1c93bb97973b\") " Nov 21 15:47:55 crc kubenswrapper[4774]: I1121 15:47:55.101082 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ldc4\" (UniqueName: \"kubernetes.io/projected/fb53ff10-9a77-4bac-ba20-1c93bb97973b-kube-api-access-8ldc4\") pod \"fb53ff10-9a77-4bac-ba20-1c93bb97973b\" (UID: \"fb53ff10-9a77-4bac-ba20-1c93bb97973b\") " Nov 21 15:47:55 crc kubenswrapper[4774]: I1121 15:47:55.101887 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb53ff10-9a77-4bac-ba20-1c93bb97973b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fb53ff10-9a77-4bac-ba20-1c93bb97973b" (UID: "fb53ff10-9a77-4bac-ba20-1c93bb97973b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:47:55 crc kubenswrapper[4774]: I1121 15:47:55.108186 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb53ff10-9a77-4bac-ba20-1c93bb97973b-kube-api-access-8ldc4" (OuterVolumeSpecName: "kube-api-access-8ldc4") pod "fb53ff10-9a77-4bac-ba20-1c93bb97973b" (UID: "fb53ff10-9a77-4bac-ba20-1c93bb97973b"). InnerVolumeSpecName "kube-api-access-8ldc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:47:55 crc kubenswrapper[4774]: I1121 15:47:55.203466 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ldc4\" (UniqueName: \"kubernetes.io/projected/fb53ff10-9a77-4bac-ba20-1c93bb97973b-kube-api-access-8ldc4\") on node \"crc\" DevicePath \"\"" Nov 21 15:47:55 crc kubenswrapper[4774]: I1121 15:47:55.203504 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb53ff10-9a77-4bac-ba20-1c93bb97973b-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:47:55 crc kubenswrapper[4774]: I1121 15:47:55.505397 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-create-9sr4x" Nov 21 15:47:55 crc kubenswrapper[4774]: I1121 15:47:55.505407 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-create-9sr4x" event={"ID":"fb53ff10-9a77-4bac-ba20-1c93bb97973b","Type":"ContainerDied","Data":"bdb5428746a3f08495b525f0012947e7e7e2a7f9aabe0f454ed6d8359382b960"} Nov 21 15:47:55 crc kubenswrapper[4774]: I1121 15:47:55.507027 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bdb5428746a3f08495b525f0012947e7e7e2a7f9aabe0f454ed6d8359382b960" Nov 21 15:47:55 crc kubenswrapper[4774]: I1121 15:47:55.932724 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-4f3c-account-create-f99kd" Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.027448 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-a3cc-account-create-8698w"] Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.035422 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-a3cc-account-create-8698w"] Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.104650 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b10741c8-7c49-45d6-ae01-cb50e0407267" path="/var/lib/kubelet/pods/b10741c8-7c49-45d6-ae01-cb50e0407267/volumes" Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.105445 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d06f2978-f05e-410a-82b2-ef92434f5b93" path="/var/lib/kubelet/pods/d06f2978-f05e-410a-82b2-ef92434f5b93/volumes" Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.124191 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q76vp\" (UniqueName: \"kubernetes.io/projected/ab64234b-0075-4e20-9b12-f7282f5a966e-kube-api-access-q76vp\") pod \"ab64234b-0075-4e20-9b12-f7282f5a966e\" (UID: \"ab64234b-0075-4e20-9b12-f7282f5a966e\") " Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.124556 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab64234b-0075-4e20-9b12-f7282f5a966e-operator-scripts\") pod \"ab64234b-0075-4e20-9b12-f7282f5a966e\" (UID: \"ab64234b-0075-4e20-9b12-f7282f5a966e\") " Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.125082 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab64234b-0075-4e20-9b12-f7282f5a966e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ab64234b-0075-4e20-9b12-f7282f5a966e" (UID: "ab64234b-0075-4e20-9b12-f7282f5a966e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.125432 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab64234b-0075-4e20-9b12-f7282f5a966e-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.129097 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab64234b-0075-4e20-9b12-f7282f5a966e-kube-api-access-q76vp" (OuterVolumeSpecName: "kube-api-access-q76vp") pod "ab64234b-0075-4e20-9b12-f7282f5a966e" (UID: "ab64234b-0075-4e20-9b12-f7282f5a966e"). InnerVolumeSpecName "kube-api-access-q76vp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.227531 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q76vp\" (UniqueName: \"kubernetes.io/projected/ab64234b-0075-4e20-9b12-f7282f5a966e-kube-api-access-q76vp\") on node \"crc\" DevicePath \"\"" Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.518644 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-4f3c-account-create-f99kd" event={"ID":"ab64234b-0075-4e20-9b12-f7282f5a966e","Type":"ContainerDied","Data":"c14acdd1193afc2e6d00ded5671651d37ad76fff8da6221f6803bc4b7affe65a"} Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.518694 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c14acdd1193afc2e6d00ded5671651d37ad76fff8da6221f6803bc4b7affe65a" Nov 21 15:47:56 crc kubenswrapper[4774]: I1121 15:47:56.518752 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-4f3c-account-create-f99kd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.451681 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-db-sync-jwdbd"] Nov 21 15:47:57 crc kubenswrapper[4774]: E1121 15:47:57.452834 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb53ff10-9a77-4bac-ba20-1c93bb97973b" containerName="mariadb-database-create" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.452858 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb53ff10-9a77-4bac-ba20-1c93bb97973b" containerName="mariadb-database-create" Nov 21 15:47:57 crc kubenswrapper[4774]: E1121 15:47:57.452889 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab64234b-0075-4e20-9b12-f7282f5a966e" containerName="mariadb-account-create" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.452908 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab64234b-0075-4e20-9b12-f7282f5a966e" containerName="mariadb-account-create" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.453252 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb53ff10-9a77-4bac-ba20-1c93bb97973b" containerName="mariadb-database-create" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.453282 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab64234b-0075-4e20-9b12-f7282f5a966e" containerName="mariadb-account-create" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.454511 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.457386 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.457410 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-nfwkf" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.457914 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.463440 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.469722 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-jwdbd"] Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.574304 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-scripts\") pod \"aodh-db-sync-jwdbd\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.574550 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-config-data\") pod \"aodh-db-sync-jwdbd\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.574751 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhwqv\" (UniqueName: \"kubernetes.io/projected/951576d9-41e4-44c5-8c46-53b97d979449-kube-api-access-jhwqv\") pod \"aodh-db-sync-jwdbd\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.575031 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-combined-ca-bundle\") pod \"aodh-db-sync-jwdbd\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.676809 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-combined-ca-bundle\") pod \"aodh-db-sync-jwdbd\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.676951 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-scripts\") pod \"aodh-db-sync-jwdbd\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.676996 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-config-data\") pod \"aodh-db-sync-jwdbd\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 
15:47:57.677027 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhwqv\" (UniqueName: \"kubernetes.io/projected/951576d9-41e4-44c5-8c46-53b97d979449-kube-api-access-jhwqv\") pod \"aodh-db-sync-jwdbd\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.683981 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-combined-ca-bundle\") pod \"aodh-db-sync-jwdbd\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.685282 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-config-data\") pod \"aodh-db-sync-jwdbd\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.685988 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-scripts\") pod \"aodh-db-sync-jwdbd\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.702525 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhwqv\" (UniqueName: \"kubernetes.io/projected/951576d9-41e4-44c5-8c46-53b97d979449-kube-api-access-jhwqv\") pod \"aodh-db-sync-jwdbd\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:57 crc kubenswrapper[4774]: I1121 15:47:57.832467 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:47:58 crc kubenswrapper[4774]: I1121 15:47:58.410034 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-db-sync-jwdbd"] Nov 21 15:47:58 crc kubenswrapper[4774]: I1121 15:47:58.548386 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-jwdbd" event={"ID":"951576d9-41e4-44c5-8c46-53b97d979449","Type":"ContainerStarted","Data":"a4e7c9431aa297d616a5d1fe5f48e873db393f84dfe3e6fbe26256237d7ea4b1"} Nov 21 15:48:03 crc kubenswrapper[4774]: I1121 15:48:03.605740 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-jwdbd" event={"ID":"951576d9-41e4-44c5-8c46-53b97d979449","Type":"ContainerStarted","Data":"87d7ff61c61be89e9ed17e17b4a4618b8a3b6a228fd79946bd42fad45ef7391c"} Nov 21 15:48:03 crc kubenswrapper[4774]: I1121 15:48:03.650001 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-db-sync-jwdbd" podStartSLOduration=2.326388964 podStartE2EDuration="6.649967904s" podCreationTimestamp="2025-11-21 15:47:57 +0000 UTC" firstStartedPulling="2025-11-21 15:47:58.41804396 +0000 UTC m=+6269.070243219" lastFinishedPulling="2025-11-21 15:48:02.7416229 +0000 UTC m=+6273.393822159" observedRunningTime="2025-11-21 15:48:03.622916122 +0000 UTC m=+6274.275115391" watchObservedRunningTime="2025-11-21 15:48:03.649967904 +0000 UTC m=+6274.302167203" Nov 21 15:48:06 crc kubenswrapper[4774]: I1121 15:48:06.638876 4774 generic.go:334] "Generic (PLEG): container finished" podID="951576d9-41e4-44c5-8c46-53b97d979449" containerID="87d7ff61c61be89e9ed17e17b4a4618b8a3b6a228fd79946bd42fad45ef7391c" exitCode=0 Nov 21 15:48:06 crc kubenswrapper[4774]: I1121 15:48:06.639058 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-jwdbd" event={"ID":"951576d9-41e4-44c5-8c46-53b97d979449","Type":"ContainerDied","Data":"87d7ff61c61be89e9ed17e17b4a4618b8a3b6a228fd79946bd42fad45ef7391c"} Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.035582 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-sw56w"] Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.046339 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-sw56w"] Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.059932 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.108762 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b" path="/var/lib/kubelet/pods/50bee0fc-1a29-4ceb-9761-4c9fe4be1a1b/volumes" Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.233724 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-scripts\") pod \"951576d9-41e4-44c5-8c46-53b97d979449\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.233802 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-combined-ca-bundle\") pod \"951576d9-41e4-44c5-8c46-53b97d979449\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.233945 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-config-data\") pod \"951576d9-41e4-44c5-8c46-53b97d979449\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.234171 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhwqv\" (UniqueName: \"kubernetes.io/projected/951576d9-41e4-44c5-8c46-53b97d979449-kube-api-access-jhwqv\") pod \"951576d9-41e4-44c5-8c46-53b97d979449\" (UID: \"951576d9-41e4-44c5-8c46-53b97d979449\") " Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.242282 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/951576d9-41e4-44c5-8c46-53b97d979449-kube-api-access-jhwqv" (OuterVolumeSpecName: "kube-api-access-jhwqv") pod "951576d9-41e4-44c5-8c46-53b97d979449" (UID: "951576d9-41e4-44c5-8c46-53b97d979449"). InnerVolumeSpecName "kube-api-access-jhwqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.242471 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-scripts" (OuterVolumeSpecName: "scripts") pod "951576d9-41e4-44c5-8c46-53b97d979449" (UID: "951576d9-41e4-44c5-8c46-53b97d979449"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.271242 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "951576d9-41e4-44c5-8c46-53b97d979449" (UID: "951576d9-41e4-44c5-8c46-53b97d979449"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.282510 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-config-data" (OuterVolumeSpecName: "config-data") pod "951576d9-41e4-44c5-8c46-53b97d979449" (UID: "951576d9-41e4-44c5-8c46-53b97d979449"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.336939 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhwqv\" (UniqueName: \"kubernetes.io/projected/951576d9-41e4-44c5-8c46-53b97d979449-kube-api-access-jhwqv\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.337656 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.337718 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.337770 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/951576d9-41e4-44c5-8c46-53b97d979449-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.666033 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-db-sync-jwdbd" event={"ID":"951576d9-41e4-44c5-8c46-53b97d979449","Type":"ContainerDied","Data":"a4e7c9431aa297d616a5d1fe5f48e873db393f84dfe3e6fbe26256237d7ea4b1"} Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.666064 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-db-sync-jwdbd" Nov 21 15:48:08 crc kubenswrapper[4774]: I1121 15:48:08.666075 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4e7c9431aa297d616a5d1fe5f48e873db393f84dfe3e6fbe26256237d7ea4b1" Nov 21 15:48:10 crc kubenswrapper[4774]: I1121 15:48:10.680515 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.040970 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/aodh-0"] Nov 21 15:48:12 crc kubenswrapper[4774]: E1121 15:48:12.041903 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="951576d9-41e4-44c5-8c46-53b97d979449" containerName="aodh-db-sync" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.041922 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="951576d9-41e4-44c5-8c46-53b97d979449" containerName="aodh-db-sync" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.042228 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="951576d9-41e4-44c5-8c46-53b97d979449" containerName="aodh-db-sync" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.044724 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.050325 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-scripts" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.050761 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-autoscaling-dockercfg-nfwkf" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.051644 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"aodh-config-data" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.057940 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.225709 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d01969-354d-45d1-8bbd-f3fb0b04e0bd-combined-ca-bundle\") pod \"aodh-0\" (UID: \"75d01969-354d-45d1-8bbd-f3fb0b04e0bd\") " pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.225787 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d01969-354d-45d1-8bbd-f3fb0b04e0bd-scripts\") pod \"aodh-0\" (UID: \"75d01969-354d-45d1-8bbd-f3fb0b04e0bd\") " pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.226398 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njmlp\" (UniqueName: \"kubernetes.io/projected/75d01969-354d-45d1-8bbd-f3fb0b04e0bd-kube-api-access-njmlp\") pod \"aodh-0\" (UID: \"75d01969-354d-45d1-8bbd-f3fb0b04e0bd\") " pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.226661 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d01969-354d-45d1-8bbd-f3fb0b04e0bd-config-data\") pod \"aodh-0\" (UID: \"75d01969-354d-45d1-8bbd-f3fb0b04e0bd\") " pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.329079 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d01969-354d-45d1-8bbd-f3fb0b04e0bd-combined-ca-bundle\") pod \"aodh-0\" (UID: \"75d01969-354d-45d1-8bbd-f3fb0b04e0bd\") " pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.329144 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d01969-354d-45d1-8bbd-f3fb0b04e0bd-scripts\") pod \"aodh-0\" (UID: \"75d01969-354d-45d1-8bbd-f3fb0b04e0bd\") " pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.329173 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njmlp\" (UniqueName: \"kubernetes.io/projected/75d01969-354d-45d1-8bbd-f3fb0b04e0bd-kube-api-access-njmlp\") pod \"aodh-0\" (UID: \"75d01969-354d-45d1-8bbd-f3fb0b04e0bd\") " pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.329295 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d01969-354d-45d1-8bbd-f3fb0b04e0bd-config-data\") pod \"aodh-0\" (UID: \"75d01969-354d-45d1-8bbd-f3fb0b04e0bd\") " pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: 
I1121 15:48:12.335447 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75d01969-354d-45d1-8bbd-f3fb0b04e0bd-scripts\") pod \"aodh-0\" (UID: \"75d01969-354d-45d1-8bbd-f3fb0b04e0bd\") " pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.335637 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75d01969-354d-45d1-8bbd-f3fb0b04e0bd-combined-ca-bundle\") pod \"aodh-0\" (UID: \"75d01969-354d-45d1-8bbd-f3fb0b04e0bd\") " pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.336207 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75d01969-354d-45d1-8bbd-f3fb0b04e0bd-config-data\") pod \"aodh-0\" (UID: \"75d01969-354d-45d1-8bbd-f3fb0b04e0bd\") " pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.346355 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njmlp\" (UniqueName: \"kubernetes.io/projected/75d01969-354d-45d1-8bbd-f3fb0b04e0bd-kube-api-access-njmlp\") pod \"aodh-0\" (UID: \"75d01969-354d-45d1-8bbd-f3fb0b04e0bd\") " pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.374056 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/aodh-0" Nov 21 15:48:12 crc kubenswrapper[4774]: I1121 15:48:12.888210 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/aodh-0"] Nov 21 15:48:13 crc kubenswrapper[4774]: I1121 15:48:13.742225 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"75d01969-354d-45d1-8bbd-f3fb0b04e0bd","Type":"ContainerStarted","Data":"73f887e6ca10dbe7b0cf19a2cb444af833e5a3437cd01f53ec0710443247fae5"} Nov 21 15:48:13 crc kubenswrapper[4774]: I1121 15:48:13.927248 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:48:13 crc kubenswrapper[4774]: I1121 15:48:13.927813 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="ceilometer-central-agent" containerID="cri-o://9c2e644cbd715359bafc3823c7b9b2fbb0b7bef4ad3171ace0488dff8b7b79d5" gracePeriod=30 Nov 21 15:48:13 crc kubenswrapper[4774]: I1121 15:48:13.927937 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="proxy-httpd" containerID="cri-o://996299051b2f8238923c0f4af15a79d515fc15425ea65770f188ecffa72ce1a0" gracePeriod=30 Nov 21 15:48:13 crc kubenswrapper[4774]: I1121 15:48:13.927880 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="sg-core" containerID="cri-o://ca45d8a1be0e3e90a6c0496e353d3fb88387b513ca325a4387ebc746bef4b21a" gracePeriod=30 Nov 21 15:48:13 crc kubenswrapper[4774]: I1121 15:48:13.927930 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="ceilometer-notification-agent" containerID="cri-o://0da7c20e891f5e447ffb2d913e87ee88e92dbaeaaf4f11a3edfe2167f63b45d6" gracePeriod=30 Nov 21 15:48:14 crc kubenswrapper[4774]: I1121 15:48:14.753769 4774 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/aodh-0" event={"ID":"75d01969-354d-45d1-8bbd-f3fb0b04e0bd","Type":"ContainerStarted","Data":"5ebe93b326c7b66db71c4087c8f3c69f040a27e316034b65293df54a9bb072aa"} Nov 21 15:48:14 crc kubenswrapper[4774]: I1121 15:48:14.758033 4774 generic.go:334] "Generic (PLEG): container finished" podID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerID="996299051b2f8238923c0f4af15a79d515fc15425ea65770f188ecffa72ce1a0" exitCode=0 Nov 21 15:48:14 crc kubenswrapper[4774]: I1121 15:48:14.758065 4774 generic.go:334] "Generic (PLEG): container finished" podID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerID="ca45d8a1be0e3e90a6c0496e353d3fb88387b513ca325a4387ebc746bef4b21a" exitCode=2 Nov 21 15:48:14 crc kubenswrapper[4774]: I1121 15:48:14.758075 4774 generic.go:334] "Generic (PLEG): container finished" podID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerID="0da7c20e891f5e447ffb2d913e87ee88e92dbaeaaf4f11a3edfe2167f63b45d6" exitCode=0 Nov 21 15:48:14 crc kubenswrapper[4774]: I1121 15:48:14.758084 4774 generic.go:334] "Generic (PLEG): container finished" podID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerID="9c2e644cbd715359bafc3823c7b9b2fbb0b7bef4ad3171ace0488dff8b7b79d5" exitCode=0 Nov 21 15:48:14 crc kubenswrapper[4774]: I1121 15:48:14.758102 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e3707c7-d51e-4671-9252-aec6c00606d3","Type":"ContainerDied","Data":"996299051b2f8238923c0f4af15a79d515fc15425ea65770f188ecffa72ce1a0"} Nov 21 15:48:14 crc kubenswrapper[4774]: I1121 15:48:14.758128 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e3707c7-d51e-4671-9252-aec6c00606d3","Type":"ContainerDied","Data":"ca45d8a1be0e3e90a6c0496e353d3fb88387b513ca325a4387ebc746bef4b21a"} Nov 21 15:48:14 crc kubenswrapper[4774]: I1121 15:48:14.758140 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e3707c7-d51e-4671-9252-aec6c00606d3","Type":"ContainerDied","Data":"0da7c20e891f5e447ffb2d913e87ee88e92dbaeaaf4f11a3edfe2167f63b45d6"} Nov 21 15:48:14 crc kubenswrapper[4774]: I1121 15:48:14.758149 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e3707c7-d51e-4671-9252-aec6c00606d3","Type":"ContainerDied","Data":"9c2e644cbd715359bafc3823c7b9b2fbb0b7bef4ad3171ace0488dff8b7b79d5"} Nov 21 15:48:15 crc kubenswrapper[4774]: I1121 15:48:15.877401 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.024403 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-combined-ca-bundle\") pod \"9e3707c7-d51e-4671-9252-aec6c00606d3\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.024454 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e3707c7-d51e-4671-9252-aec6c00606d3-log-httpd\") pod \"9e3707c7-d51e-4671-9252-aec6c00606d3\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.024603 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-config-data\") pod \"9e3707c7-d51e-4671-9252-aec6c00606d3\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.024639 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqvs9\" (UniqueName: \"kubernetes.io/projected/9e3707c7-d51e-4671-9252-aec6c00606d3-kube-api-access-hqvs9\") pod \"9e3707c7-d51e-4671-9252-aec6c00606d3\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.024670 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-scripts\") pod \"9e3707c7-d51e-4671-9252-aec6c00606d3\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.024701 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-sg-core-conf-yaml\") pod \"9e3707c7-d51e-4671-9252-aec6c00606d3\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.024738 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e3707c7-d51e-4671-9252-aec6c00606d3-run-httpd\") pod \"9e3707c7-d51e-4671-9252-aec6c00606d3\" (UID: \"9e3707c7-d51e-4671-9252-aec6c00606d3\") " Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.025498 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e3707c7-d51e-4671-9252-aec6c00606d3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9e3707c7-d51e-4671-9252-aec6c00606d3" (UID: "9e3707c7-d51e-4671-9252-aec6c00606d3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.025807 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9e3707c7-d51e-4671-9252-aec6c00606d3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9e3707c7-d51e-4671-9252-aec6c00606d3" (UID: "9e3707c7-d51e-4671-9252-aec6c00606d3"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.031312 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e3707c7-d51e-4671-9252-aec6c00606d3-kube-api-access-hqvs9" (OuterVolumeSpecName: "kube-api-access-hqvs9") pod "9e3707c7-d51e-4671-9252-aec6c00606d3" (UID: "9e3707c7-d51e-4671-9252-aec6c00606d3"). InnerVolumeSpecName "kube-api-access-hqvs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.032257 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-scripts" (OuterVolumeSpecName: "scripts") pod "9e3707c7-d51e-4671-9252-aec6c00606d3" (UID: "9e3707c7-d51e-4671-9252-aec6c00606d3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.070638 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9e3707c7-d51e-4671-9252-aec6c00606d3" (UID: "9e3707c7-d51e-4671-9252-aec6c00606d3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.125716 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e3707c7-d51e-4671-9252-aec6c00606d3" (UID: "9e3707c7-d51e-4671-9252-aec6c00606d3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.136629 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-config-data" (OuterVolumeSpecName: "config-data") pod "9e3707c7-d51e-4671-9252-aec6c00606d3" (UID: "9e3707c7-d51e-4671-9252-aec6c00606d3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.146698 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.146917 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e3707c7-d51e-4671-9252-aec6c00606d3-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.146977 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.147060 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqvs9\" (UniqueName: \"kubernetes.io/projected/9e3707c7-d51e-4671-9252-aec6c00606d3-kube-api-access-hqvs9\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.147122 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.147185 4774 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9e3707c7-d51e-4671-9252-aec6c00606d3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.147236 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9e3707c7-d51e-4671-9252-aec6c00606d3-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.798730 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"9e3707c7-d51e-4671-9252-aec6c00606d3","Type":"ContainerDied","Data":"86d590f9e0b6803b5b4cc694cfd969937bbb173121f4cca97487d48a7759419c"} Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.798860 4774 scope.go:117] "RemoveContainer" containerID="996299051b2f8238923c0f4af15a79d515fc15425ea65770f188ecffa72ce1a0" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.798902 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.837510 4774 scope.go:117] "RemoveContainer" containerID="ca45d8a1be0e3e90a6c0496e353d3fb88387b513ca325a4387ebc746bef4b21a" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.841021 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.856554 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.870141 4774 scope.go:117] "RemoveContainer" containerID="0da7c20e891f5e447ffb2d913e87ee88e92dbaeaaf4f11a3edfe2167f63b45d6" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.870286 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:48:16 crc kubenswrapper[4774]: E1121 15:48:16.870730 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="ceilometer-central-agent" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.870747 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="ceilometer-central-agent" Nov 21 15:48:16 crc kubenswrapper[4774]: E1121 15:48:16.870760 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="ceilometer-notification-agent" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.870766 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="ceilometer-notification-agent" Nov 21 15:48:16 crc kubenswrapper[4774]: E1121 15:48:16.870797 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="sg-core" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.870804 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="sg-core" Nov 21 15:48:16 crc kubenswrapper[4774]: E1121 15:48:16.870831 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="proxy-httpd" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.870837 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="proxy-httpd" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.871047 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="sg-core" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.871065 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="ceilometer-central-agent" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.871079 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="proxy-httpd" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.871090 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" containerName="ceilometer-notification-agent" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.873226 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.876322 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.876492 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.885778 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.900601 4774 scope.go:117] "RemoveContainer" containerID="9c2e644cbd715359bafc3823c7b9b2fbb0b7bef4ad3171ace0488dff8b7b79d5" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.961984 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.962139 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v698\" (UniqueName: \"kubernetes.io/projected/cf37bad3-5197-4496-996c-58807e46f313-kube-api-access-4v698\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.962198 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37bad3-5197-4496-996c-58807e46f313-log-httpd\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.962249 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37bad3-5197-4496-996c-58807e46f313-run-httpd\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.962306 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.962414 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-config-data\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:16 crc kubenswrapper[4774]: I1121 15:48:16.962498 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-scripts\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.063947 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-scripts\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.064036 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.064089 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v698\" (UniqueName: \"kubernetes.io/projected/cf37bad3-5197-4496-996c-58807e46f313-kube-api-access-4v698\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.064114 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37bad3-5197-4496-996c-58807e46f313-log-httpd\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.064141 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37bad3-5197-4496-996c-58807e46f313-run-httpd\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.064177 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.064219 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-config-data\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.065505 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37bad3-5197-4496-996c-58807e46f313-log-httpd\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.065992 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37bad3-5197-4496-996c-58807e46f313-run-httpd\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.069604 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-scripts\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.070162 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-config-data\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.071086 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.084636 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.090911 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v698\" (UniqueName: \"kubernetes.io/projected/cf37bad3-5197-4496-996c-58807e46f313-kube-api-access-4v698\") pod \"ceilometer-0\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.197024 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 15:48:17 crc kubenswrapper[4774]: I1121 15:48:17.697161 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:48:18 crc kubenswrapper[4774]: I1121 15:48:18.107745 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e3707c7-d51e-4671-9252-aec6c00606d3" path="/var/lib/kubelet/pods/9e3707c7-d51e-4671-9252-aec6c00606d3/volumes" Nov 21 15:48:18 crc kubenswrapper[4774]: W1121 15:48:18.198630 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf37bad3_5197_4496_996c_58807e46f313.slice/crio-14743217648f7abe3134b31da8693755ca5354d0a663d40d850fe12622af9aa6 WatchSource:0}: Error finding container 14743217648f7abe3134b31da8693755ca5354d0a663d40d850fe12622af9aa6: Status 404 returned error can't find the container with id 14743217648f7abe3134b31da8693755ca5354d0a663d40d850fe12622af9aa6 Nov 21 15:48:18 crc kubenswrapper[4774]: I1121 15:48:18.823159 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37bad3-5197-4496-996c-58807e46f313","Type":"ContainerStarted","Data":"14743217648f7abe3134b31da8693755ca5354d0a663d40d850fe12622af9aa6"} Nov 21 15:48:19 crc kubenswrapper[4774]: I1121 15:48:19.837439 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37bad3-5197-4496-996c-58807e46f313","Type":"ContainerStarted","Data":"10f2769edb6bff1efa8150cb1499cf56d794e1f63d857b5a733c5140d070f5ac"} Nov 21 15:48:19 crc kubenswrapper[4774]: I1121 15:48:19.841151 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"75d01969-354d-45d1-8bbd-f3fb0b04e0bd","Type":"ContainerStarted","Data":"c7a3d667881bf91ffd4ae13b335bb53e458f6a6f4292a60cd7c91d7663eb31e0"} Nov 21 15:48:20 crc kubenswrapper[4774]: I1121 15:48:20.857387 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37bad3-5197-4496-996c-58807e46f313","Type":"ContainerStarted","Data":"5bcb510b3f402253f6899262c01d3c0bd136c7177ef76053cab47f16e92642c1"} Nov 21 
15:48:23 crc kubenswrapper[4774]: I1121 15:48:23.884784 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"75d01969-354d-45d1-8bbd-f3fb0b04e0bd","Type":"ContainerStarted","Data":"66e04da55ad19c17b7cb5f238ae665e3fa400e7c61242086d30d04cfe3aa3bcb"} Nov 21 15:48:23 crc kubenswrapper[4774]: I1121 15:48:23.887103 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37bad3-5197-4496-996c-58807e46f313","Type":"ContainerStarted","Data":"0eb665f9146b3fc46a3b8dc098286f69c26e6b62eed004d98eb3da56450dbbb1"} Nov 21 15:48:26 crc kubenswrapper[4774]: I1121 15:48:26.942691 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37bad3-5197-4496-996c-58807e46f313","Type":"ContainerStarted","Data":"ccf49c063658e616267ad153063371a5c619ac0c643368688175cb3e476aa00c"} Nov 21 15:48:26 crc kubenswrapper[4774]: I1121 15:48:26.944293 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 21 15:48:26 crc kubenswrapper[4774]: I1121 15:48:26.947622 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/aodh-0" event={"ID":"75d01969-354d-45d1-8bbd-f3fb0b04e0bd","Type":"ContainerStarted","Data":"42ed9c1fedad13960dd83d516eaa09c6d7877c5a1e59ef28b24e6a874e305ebd"} Nov 21 15:48:26 crc kubenswrapper[4774]: I1121 15:48:26.974454 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.026285447 podStartE2EDuration="10.974436208s" podCreationTimestamp="2025-11-21 15:48:16 +0000 UTC" firstStartedPulling="2025-11-21 15:48:18.209527984 +0000 UTC m=+6288.861727253" lastFinishedPulling="2025-11-21 15:48:26.157678755 +0000 UTC m=+6296.809878014" observedRunningTime="2025-11-21 15:48:26.96713362 +0000 UTC m=+6297.619332879" watchObservedRunningTime="2025-11-21 15:48:26.974436208 +0000 UTC m=+6297.626635467" Nov 21 15:48:26 crc kubenswrapper[4774]: I1121 15:48:26.996881 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/aodh-0" podStartSLOduration=2.329737018 podStartE2EDuration="15.996862927s" podCreationTimestamp="2025-11-21 15:48:11 +0000 UTC" firstStartedPulling="2025-11-21 15:48:12.895068177 +0000 UTC m=+6283.547267436" lastFinishedPulling="2025-11-21 15:48:26.562194086 +0000 UTC m=+6297.214393345" observedRunningTime="2025-11-21 15:48:26.988669474 +0000 UTC m=+6297.640868743" watchObservedRunningTime="2025-11-21 15:48:26.996862927 +0000 UTC m=+6297.649062196" Nov 21 15:48:29 crc kubenswrapper[4774]: I1121 15:48:29.601422 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:48:29 crc kubenswrapper[4774]: I1121 15:48:29.602002 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.651147 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-create-6gh4k"] Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.653624 4774 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack/manila-db-create-6gh4k" Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.689004 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-6gh4k"] Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.757277 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-3c58-account-create-tnkjp"] Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.758891 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-3c58-account-create-tnkjp" Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.761053 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-db-secret" Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.766767 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-3c58-account-create-tnkjp"] Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.831960 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e9ab910-701e-4dcc-baae-ef62322dc2da-operator-scripts\") pod \"manila-db-create-6gh4k\" (UID: \"7e9ab910-701e-4dcc-baae-ef62322dc2da\") " pod="openstack/manila-db-create-6gh4k" Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.832086 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hm59n\" (UniqueName: \"kubernetes.io/projected/7e9ab910-701e-4dcc-baae-ef62322dc2da-kube-api-access-hm59n\") pod \"manila-db-create-6gh4k\" (UID: \"7e9ab910-701e-4dcc-baae-ef62322dc2da\") " pod="openstack/manila-db-create-6gh4k" Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.934063 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf189446-356a-40a4-bea0-52433af962d0-operator-scripts\") pod \"manila-3c58-account-create-tnkjp\" (UID: \"bf189446-356a-40a4-bea0-52433af962d0\") " pod="openstack/manila-3c58-account-create-tnkjp" Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.934231 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfjz7\" (UniqueName: \"kubernetes.io/projected/bf189446-356a-40a4-bea0-52433af962d0-kube-api-access-lfjz7\") pod \"manila-3c58-account-create-tnkjp\" (UID: \"bf189446-356a-40a4-bea0-52433af962d0\") " pod="openstack/manila-3c58-account-create-tnkjp" Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.934291 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e9ab910-701e-4dcc-baae-ef62322dc2da-operator-scripts\") pod \"manila-db-create-6gh4k\" (UID: \"7e9ab910-701e-4dcc-baae-ef62322dc2da\") " pod="openstack/manila-db-create-6gh4k" Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.934443 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hm59n\" (UniqueName: \"kubernetes.io/projected/7e9ab910-701e-4dcc-baae-ef62322dc2da-kube-api-access-hm59n\") pod \"manila-db-create-6gh4k\" (UID: \"7e9ab910-701e-4dcc-baae-ef62322dc2da\") " pod="openstack/manila-db-create-6gh4k" Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.935946 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/7e9ab910-701e-4dcc-baae-ef62322dc2da-operator-scripts\") pod \"manila-db-create-6gh4k\" (UID: \"7e9ab910-701e-4dcc-baae-ef62322dc2da\") " pod="openstack/manila-db-create-6gh4k" Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.954670 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hm59n\" (UniqueName: \"kubernetes.io/projected/7e9ab910-701e-4dcc-baae-ef62322dc2da-kube-api-access-hm59n\") pod \"manila-db-create-6gh4k\" (UID: \"7e9ab910-701e-4dcc-baae-ef62322dc2da\") " pod="openstack/manila-db-create-6gh4k" Nov 21 15:48:36 crc kubenswrapper[4774]: I1121 15:48:36.994708 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-6gh4k" Nov 21 15:48:37 crc kubenswrapper[4774]: I1121 15:48:37.036455 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf189446-356a-40a4-bea0-52433af962d0-operator-scripts\") pod \"manila-3c58-account-create-tnkjp\" (UID: \"bf189446-356a-40a4-bea0-52433af962d0\") " pod="openstack/manila-3c58-account-create-tnkjp" Nov 21 15:48:37 crc kubenswrapper[4774]: I1121 15:48:37.036765 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfjz7\" (UniqueName: \"kubernetes.io/projected/bf189446-356a-40a4-bea0-52433af962d0-kube-api-access-lfjz7\") pod \"manila-3c58-account-create-tnkjp\" (UID: \"bf189446-356a-40a4-bea0-52433af962d0\") " pod="openstack/manila-3c58-account-create-tnkjp" Nov 21 15:48:37 crc kubenswrapper[4774]: I1121 15:48:37.038138 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf189446-356a-40a4-bea0-52433af962d0-operator-scripts\") pod \"manila-3c58-account-create-tnkjp\" (UID: \"bf189446-356a-40a4-bea0-52433af962d0\") " pod="openstack/manila-3c58-account-create-tnkjp" Nov 21 15:48:37 crc kubenswrapper[4774]: I1121 15:48:37.055629 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfjz7\" (UniqueName: \"kubernetes.io/projected/bf189446-356a-40a4-bea0-52433af962d0-kube-api-access-lfjz7\") pod \"manila-3c58-account-create-tnkjp\" (UID: \"bf189446-356a-40a4-bea0-52433af962d0\") " pod="openstack/manila-3c58-account-create-tnkjp" Nov 21 15:48:37 crc kubenswrapper[4774]: I1121 15:48:37.086645 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-3c58-account-create-tnkjp" Nov 21 15:48:37 crc kubenswrapper[4774]: I1121 15:48:37.519590 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-create-6gh4k"] Nov 21 15:48:37 crc kubenswrapper[4774]: W1121 15:48:37.666511 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf189446_356a_40a4_bea0_52433af962d0.slice/crio-e8bc4967688c24dc9cdc35f90adbaebe34c4924b9512aadb0feb43ce57b4d5df WatchSource:0}: Error finding container e8bc4967688c24dc9cdc35f90adbaebe34c4924b9512aadb0feb43ce57b4d5df: Status 404 returned error can't find the container with id e8bc4967688c24dc9cdc35f90adbaebe34c4924b9512aadb0feb43ce57b4d5df Nov 21 15:48:37 crc kubenswrapper[4774]: I1121 15:48:37.666784 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-3c58-account-create-tnkjp"] Nov 21 15:48:38 crc kubenswrapper[4774]: I1121 15:48:38.052683 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-6gh4k" event={"ID":"7e9ab910-701e-4dcc-baae-ef62322dc2da","Type":"ContainerStarted","Data":"c610da57df3c95c435e2046d9dc916bc2f234851566fbc953fc57d91dd1a7971"} Nov 21 15:48:38 crc kubenswrapper[4774]: I1121 15:48:38.053022 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-6gh4k" event={"ID":"7e9ab910-701e-4dcc-baae-ef62322dc2da","Type":"ContainerStarted","Data":"219c810f58df8230a5ecbd81f83d62f343c005dcd7f8f3bc9cb7c62d0d5c322f"} Nov 21 15:48:38 crc kubenswrapper[4774]: I1121 15:48:38.054600 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-3c58-account-create-tnkjp" event={"ID":"bf189446-356a-40a4-bea0-52433af962d0","Type":"ContainerStarted","Data":"0e0b0b0ffb31c124ec5331935fb0e508650cf594772babd684b0d5230e242873"} Nov 21 15:48:38 crc kubenswrapper[4774]: I1121 15:48:38.054632 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-3c58-account-create-tnkjp" event={"ID":"bf189446-356a-40a4-bea0-52433af962d0","Type":"ContainerStarted","Data":"e8bc4967688c24dc9cdc35f90adbaebe34c4924b9512aadb0feb43ce57b4d5df"} Nov 21 15:48:38 crc kubenswrapper[4774]: I1121 15:48:38.071000 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-create-6gh4k" podStartSLOduration=2.07098032 podStartE2EDuration="2.07098032s" podCreationTimestamp="2025-11-21 15:48:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:48:38.068972983 +0000 UTC m=+6308.721172242" watchObservedRunningTime="2025-11-21 15:48:38.07098032 +0000 UTC m=+6308.723179579" Nov 21 15:48:38 crc kubenswrapper[4774]: I1121 15:48:38.088654 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-3c58-account-create-tnkjp" podStartSLOduration=2.088637663 podStartE2EDuration="2.088637663s" podCreationTimestamp="2025-11-21 15:48:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:48:38.083144617 +0000 UTC m=+6308.735343876" watchObservedRunningTime="2025-11-21 15:48:38.088637663 +0000 UTC m=+6308.740836922" Nov 21 15:48:39 crc kubenswrapper[4774]: I1121 15:48:39.069529 4774 generic.go:334] "Generic (PLEG): container finished" podID="bf189446-356a-40a4-bea0-52433af962d0" 
containerID="0e0b0b0ffb31c124ec5331935fb0e508650cf594772babd684b0d5230e242873" exitCode=0 Nov 21 15:48:39 crc kubenswrapper[4774]: I1121 15:48:39.069678 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-3c58-account-create-tnkjp" event={"ID":"bf189446-356a-40a4-bea0-52433af962d0","Type":"ContainerDied","Data":"0e0b0b0ffb31c124ec5331935fb0e508650cf594772babd684b0d5230e242873"} Nov 21 15:48:39 crc kubenswrapper[4774]: I1121 15:48:39.073317 4774 generic.go:334] "Generic (PLEG): container finished" podID="7e9ab910-701e-4dcc-baae-ef62322dc2da" containerID="c610da57df3c95c435e2046d9dc916bc2f234851566fbc953fc57d91dd1a7971" exitCode=0 Nov 21 15:48:39 crc kubenswrapper[4774]: I1121 15:48:39.073389 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-6gh4k" event={"ID":"7e9ab910-701e-4dcc-baae-ef62322dc2da","Type":"ContainerDied","Data":"c610da57df3c95c435e2046d9dc916bc2f234851566fbc953fc57d91dd1a7971"} Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.623108 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-create-6gh4k" Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.636053 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-3c58-account-create-tnkjp" Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.748634 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hm59n\" (UniqueName: \"kubernetes.io/projected/7e9ab910-701e-4dcc-baae-ef62322dc2da-kube-api-access-hm59n\") pod \"7e9ab910-701e-4dcc-baae-ef62322dc2da\" (UID: \"7e9ab910-701e-4dcc-baae-ef62322dc2da\") " Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.748887 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfjz7\" (UniqueName: \"kubernetes.io/projected/bf189446-356a-40a4-bea0-52433af962d0-kube-api-access-lfjz7\") pod \"bf189446-356a-40a4-bea0-52433af962d0\" (UID: \"bf189446-356a-40a4-bea0-52433af962d0\") " Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.748959 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e9ab910-701e-4dcc-baae-ef62322dc2da-operator-scripts\") pod \"7e9ab910-701e-4dcc-baae-ef62322dc2da\" (UID: \"7e9ab910-701e-4dcc-baae-ef62322dc2da\") " Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.749021 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf189446-356a-40a4-bea0-52433af962d0-operator-scripts\") pod \"bf189446-356a-40a4-bea0-52433af962d0\" (UID: \"bf189446-356a-40a4-bea0-52433af962d0\") " Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.749795 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e9ab910-701e-4dcc-baae-ef62322dc2da-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7e9ab910-701e-4dcc-baae-ef62322dc2da" (UID: "7e9ab910-701e-4dcc-baae-ef62322dc2da"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.749868 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf189446-356a-40a4-bea0-52433af962d0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bf189446-356a-40a4-bea0-52433af962d0" (UID: "bf189446-356a-40a4-bea0-52433af962d0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.754675 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf189446-356a-40a4-bea0-52433af962d0-kube-api-access-lfjz7" (OuterVolumeSpecName: "kube-api-access-lfjz7") pod "bf189446-356a-40a4-bea0-52433af962d0" (UID: "bf189446-356a-40a4-bea0-52433af962d0"). InnerVolumeSpecName "kube-api-access-lfjz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.755126 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e9ab910-701e-4dcc-baae-ef62322dc2da-kube-api-access-hm59n" (OuterVolumeSpecName: "kube-api-access-hm59n") pod "7e9ab910-701e-4dcc-baae-ef62322dc2da" (UID: "7e9ab910-701e-4dcc-baae-ef62322dc2da"). InnerVolumeSpecName "kube-api-access-hm59n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.852183 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfjz7\" (UniqueName: \"kubernetes.io/projected/bf189446-356a-40a4-bea0-52433af962d0-kube-api-access-lfjz7\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.852226 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e9ab910-701e-4dcc-baae-ef62322dc2da-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.852238 4774 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf189446-356a-40a4-bea0-52433af962d0-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:40 crc kubenswrapper[4774]: I1121 15:48:40.852251 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hm59n\" (UniqueName: \"kubernetes.io/projected/7e9ab910-701e-4dcc-baae-ef62322dc2da-kube-api-access-hm59n\") on node \"crc\" DevicePath \"\"" Nov 21 15:48:41 crc kubenswrapper[4774]: I1121 15:48:41.097509 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-create-6gh4k" Nov 21 15:48:41 crc kubenswrapper[4774]: I1121 15:48:41.097515 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-create-6gh4k" event={"ID":"7e9ab910-701e-4dcc-baae-ef62322dc2da","Type":"ContainerDied","Data":"219c810f58df8230a5ecbd81f83d62f343c005dcd7f8f3bc9cb7c62d0d5c322f"} Nov 21 15:48:41 crc kubenswrapper[4774]: I1121 15:48:41.097709 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="219c810f58df8230a5ecbd81f83d62f343c005dcd7f8f3bc9cb7c62d0d5c322f" Nov 21 15:48:41 crc kubenswrapper[4774]: I1121 15:48:41.099703 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-3c58-account-create-tnkjp" event={"ID":"bf189446-356a-40a4-bea0-52433af962d0","Type":"ContainerDied","Data":"e8bc4967688c24dc9cdc35f90adbaebe34c4924b9512aadb0feb43ce57b4d5df"} Nov 21 15:48:41 crc kubenswrapper[4774]: I1121 15:48:41.099754 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8bc4967688c24dc9cdc35f90adbaebe34c4924b9512aadb0feb43ce57b4d5df" Nov 21 15:48:41 crc kubenswrapper[4774]: I1121 15:48:41.099792 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-3c58-account-create-tnkjp" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.236025 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-db-sync-rdhgd"] Nov 21 15:48:42 crc kubenswrapper[4774]: E1121 15:48:42.237122 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf189446-356a-40a4-bea0-52433af962d0" containerName="mariadb-account-create" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.237137 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf189446-356a-40a4-bea0-52433af962d0" containerName="mariadb-account-create" Nov 21 15:48:42 crc kubenswrapper[4774]: E1121 15:48:42.237159 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e9ab910-701e-4dcc-baae-ef62322dc2da" containerName="mariadb-database-create" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.237164 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e9ab910-701e-4dcc-baae-ef62322dc2da" containerName="mariadb-database-create" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.237397 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf189446-356a-40a4-bea0-52433af962d0" containerName="mariadb-account-create" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.237424 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e9ab910-701e-4dcc-baae-ef62322dc2da" containerName="mariadb-database-create" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.238443 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.240934 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.253294 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-rdhgd"] Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.253776 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-ks2b2" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.399833 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-config-data\") pod \"manila-db-sync-rdhgd\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.399888 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-job-config-data\") pod \"manila-db-sync-rdhgd\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.400144 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlpb8\" (UniqueName: \"kubernetes.io/projected/4b60322c-4057-471e-9af8-c69b88f7df9f-kube-api-access-dlpb8\") pod \"manila-db-sync-rdhgd\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.400265 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-combined-ca-bundle\") pod \"manila-db-sync-rdhgd\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.502028 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlpb8\" (UniqueName: \"kubernetes.io/projected/4b60322c-4057-471e-9af8-c69b88f7df9f-kube-api-access-dlpb8\") pod \"manila-db-sync-rdhgd\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.502326 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-combined-ca-bundle\") pod \"manila-db-sync-rdhgd\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.502471 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-config-data\") pod \"manila-db-sync-rdhgd\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.502593 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-job-config-data\") pod \"manila-db-sync-rdhgd\" (UID: 
\"4b60322c-4057-471e-9af8-c69b88f7df9f\") " pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.509082 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-config-data\") pod \"manila-db-sync-rdhgd\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.510124 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-job-config-data\") pod \"manila-db-sync-rdhgd\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.515279 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-combined-ca-bundle\") pod \"manila-db-sync-rdhgd\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.519162 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlpb8\" (UniqueName: \"kubernetes.io/projected/4b60322c-4057-471e-9af8-c69b88f7df9f-kube-api-access-dlpb8\") pod \"manila-db-sync-rdhgd\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:42 crc kubenswrapper[4774]: I1121 15:48:42.576264 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-rdhgd" Nov 21 15:48:43 crc kubenswrapper[4774]: I1121 15:48:43.682542 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-db-sync-rdhgd"] Nov 21 15:48:44 crc kubenswrapper[4774]: I1121 15:48:44.139223 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-rdhgd" event={"ID":"4b60322c-4057-471e-9af8-c69b88f7df9f","Type":"ContainerStarted","Data":"f138eb93b478fd73af8460fbca05b18013c63f24f177490003cdbe91f08c0e58"} Nov 21 15:48:47 crc kubenswrapper[4774]: I1121 15:48:47.203141 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 21 15:48:47 crc kubenswrapper[4774]: I1121 15:48:47.873097 4774 scope.go:117] "RemoveContainer" containerID="b046e17a120a2ffc31c0fc4df7eec43593214fd5d3b2e5ecc13499c516b4cbbc" Nov 21 15:48:51 crc kubenswrapper[4774]: I1121 15:48:51.085306 4774 scope.go:117] "RemoveContainer" containerID="2dca65d009d625af683cb089814d248bffd13cbf441bd14e97a305579256ba13" Nov 21 15:48:51 crc kubenswrapper[4774]: I1121 15:48:51.119740 4774 scope.go:117] "RemoveContainer" containerID="88e9c7a8b405f5fd6532dec331dd429627d589bb4ff973b3770fb62f3cbc4e76" Nov 21 15:48:53 crc kubenswrapper[4774]: I1121 15:48:53.241173 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-rdhgd" event={"ID":"4b60322c-4057-471e-9af8-c69b88f7df9f","Type":"ContainerStarted","Data":"d81a2c8ac0e573f67d402c42dbe491e22650d75817a2e42cdff69d30cc7439a0"} Nov 21 15:48:53 crc kubenswrapper[4774]: I1121 15:48:53.257981 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-db-sync-rdhgd" podStartSLOduration=3.358337736 podStartE2EDuration="11.257968695s" podCreationTimestamp="2025-11-21 15:48:42 +0000 UTC" firstStartedPulling="2025-11-21 15:48:43.685149149 +0000 UTC 
m=+6314.337348408" lastFinishedPulling="2025-11-21 15:48:51.584780108 +0000 UTC m=+6322.236979367" observedRunningTime="2025-11-21 15:48:53.257500271 +0000 UTC m=+6323.909699530" watchObservedRunningTime="2025-11-21 15:48:53.257968695 +0000 UTC m=+6323.910167954" Nov 21 15:48:59 crc kubenswrapper[4774]: I1121 15:48:59.600913 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:48:59 crc kubenswrapper[4774]: I1121 15:48:59.601692 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:49:00 crc kubenswrapper[4774]: I1121 15:49:00.305296 4774 generic.go:334] "Generic (PLEG): container finished" podID="4b60322c-4057-471e-9af8-c69b88f7df9f" containerID="d81a2c8ac0e573f67d402c42dbe491e22650d75817a2e42cdff69d30cc7439a0" exitCode=0 Nov 21 15:49:00 crc kubenswrapper[4774]: I1121 15:49:00.305382 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-rdhgd" event={"ID":"4b60322c-4057-471e-9af8-c69b88f7df9f","Type":"ContainerDied","Data":"d81a2c8ac0e573f67d402c42dbe491e22650d75817a2e42cdff69d30cc7439a0"} Nov 21 15:49:01 crc kubenswrapper[4774]: I1121 15:49:01.801220 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/manila-db-sync-rdhgd" Nov 21 15:49:01 crc kubenswrapper[4774]: I1121 15:49:01.930512 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-combined-ca-bundle\") pod \"4b60322c-4057-471e-9af8-c69b88f7df9f\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " Nov 21 15:49:01 crc kubenswrapper[4774]: I1121 15:49:01.930683 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-job-config-data\") pod \"4b60322c-4057-471e-9af8-c69b88f7df9f\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " Nov 21 15:49:01 crc kubenswrapper[4774]: I1121 15:49:01.930774 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlpb8\" (UniqueName: \"kubernetes.io/projected/4b60322c-4057-471e-9af8-c69b88f7df9f-kube-api-access-dlpb8\") pod \"4b60322c-4057-471e-9af8-c69b88f7df9f\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " Nov 21 15:49:01 crc kubenswrapper[4774]: I1121 15:49:01.931006 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-config-data\") pod \"4b60322c-4057-471e-9af8-c69b88f7df9f\" (UID: \"4b60322c-4057-471e-9af8-c69b88f7df9f\") " Nov 21 15:49:01 crc kubenswrapper[4774]: I1121 15:49:01.940160 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "4b60322c-4057-471e-9af8-c69b88f7df9f" (UID: "4b60322c-4057-471e-9af8-c69b88f7df9f"). 
InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:49:01 crc kubenswrapper[4774]: I1121 15:49:01.941538 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b60322c-4057-471e-9af8-c69b88f7df9f-kube-api-access-dlpb8" (OuterVolumeSpecName: "kube-api-access-dlpb8") pod "4b60322c-4057-471e-9af8-c69b88f7df9f" (UID: "4b60322c-4057-471e-9af8-c69b88f7df9f"). InnerVolumeSpecName "kube-api-access-dlpb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:49:01 crc kubenswrapper[4774]: I1121 15:49:01.948015 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-config-data" (OuterVolumeSpecName: "config-data") pod "4b60322c-4057-471e-9af8-c69b88f7df9f" (UID: "4b60322c-4057-471e-9af8-c69b88f7df9f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:49:01 crc kubenswrapper[4774]: I1121 15:49:01.962256 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b60322c-4057-471e-9af8-c69b88f7df9f" (UID: "4b60322c-4057-471e-9af8-c69b88f7df9f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.041792 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlpb8\" (UniqueName: \"kubernetes.io/projected/4b60322c-4057-471e-9af8-c69b88f7df9f-kube-api-access-dlpb8\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.041875 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.041894 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.041912 4774 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/4b60322c-4057-471e-9af8-c69b88f7df9f-job-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.323760 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-db-sync-rdhgd" event={"ID":"4b60322c-4057-471e-9af8-c69b88f7df9f","Type":"ContainerDied","Data":"f138eb93b478fd73af8460fbca05b18013c63f24f177490003cdbe91f08c0e58"} Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.323798 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f138eb93b478fd73af8460fbca05b18013c63f24f177490003cdbe91f08c0e58" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.323806 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-db-sync-rdhgd" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.701347 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-share-share1-0"] Nov 21 15:49:02 crc kubenswrapper[4774]: E1121 15:49:02.702208 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b60322c-4057-471e-9af8-c69b88f7df9f" containerName="manila-db-sync" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.702230 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b60322c-4057-471e-9af8-c69b88f7df9f" containerName="manila-db-sync" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.702572 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b60322c-4057-471e-9af8-c69b88f7df9f" containerName="manila-db-sync" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.704541 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.708234 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-share-share1-config-data" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.708568 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-config-data" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.708717 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-manila-dockercfg-ks2b2" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.709625 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scripts" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.714229 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-scheduler-0"] Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.723495 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.729059 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-scheduler-config-data" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.760245 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.760538 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.760618 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czzrm\" (UniqueName: \"kubernetes.io/projected/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-kube-api-access-czzrm\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.760739 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.760861 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mzjh\" (UniqueName: \"kubernetes.io/projected/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-kube-api-access-9mzjh\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.760944 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-ceph\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.761045 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-config-data\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.761124 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.761190 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-scripts\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.761268 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.761360 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.761460 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-config-data\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.761574 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-scripts\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.761659 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.766628 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.782934 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.811522 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7ffc4959f5-pb5rf"] Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.814338 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.828655 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ffc4959f5-pb5rf"] Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.864540 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.864595 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-config-data\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.864645 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-scripts\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.864667 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-ovsdbserver-nb\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.864708 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.864745 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.864774 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.864795 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czzrm\" (UniqueName: \"kubernetes.io/projected/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-kube-api-access-czzrm\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.864975 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w46tt\" (UniqueName: \"kubernetes.io/projected/679926e0-e43b-4657-b63d-4ff6af9ed155-kube-api-access-w46tt\") pod 
\"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.865018 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-ovsdbserver-sb\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.865045 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.865100 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mzjh\" (UniqueName: \"kubernetes.io/projected/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-kube-api-access-9mzjh\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.865121 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-ceph\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.865151 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-config-data\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.865178 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-config\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.865213 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.865242 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-scripts\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.865273 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: 
I1121 15:49:02.865296 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-dns-svc\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.866258 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.870809 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.870936 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.872701 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-config-data\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.873566 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-scripts\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.878608 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.879049 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-scripts\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.879461 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/projected/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-ceph\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.880065 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " 
pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.884808 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-config-data\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.884898 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.886713 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-combined-ca-bundle\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.888552 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mzjh\" (UniqueName: \"kubernetes.io/projected/fc220fc3-ac7d-4c67-8b6c-cb3f15e61099-kube-api-access-9mzjh\") pod \"manila-scheduler-0\" (UID: \"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099\") " pod="openstack/manila-scheduler-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.897283 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czzrm\" (UniqueName: \"kubernetes.io/projected/7187f8c3-a88d-4b53-9f36-3d3aaa44a426-kube-api-access-czzrm\") pod \"manila-share-share1-0\" (UID: \"7187f8c3-a88d-4b53-9f36-3d3aaa44a426\") " pod="openstack/manila-share-share1-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.969526 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-ovsdbserver-nb\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.969667 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w46tt\" (UniqueName: \"kubernetes.io/projected/679926e0-e43b-4657-b63d-4ff6af9ed155-kube-api-access-w46tt\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.969708 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-ovsdbserver-sb\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.969769 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-config\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.969803 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-dns-svc\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.970882 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-dns-svc\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.971192 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-ovsdbserver-nb\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.971459 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-ovsdbserver-sb\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.972036 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-config\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.972205 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/manila-api-0"] Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.973965 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.977914 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"manila-api-config-data" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.989175 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w46tt\" (UniqueName: \"kubernetes.io/projected/679926e0-e43b-4657-b63d-4ff6af9ed155-kube-api-access-w46tt\") pod \"dnsmasq-dns-7ffc4959f5-pb5rf\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") " pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:02 crc kubenswrapper[4774]: I1121 15:49:02.994735 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.044193 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-share-share1-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.056018 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/manila-scheduler-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.071544 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c773d1ce-8355-4e9e-b667-259d4090ae94-config-data-custom\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.071659 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c773d1ce-8355-4e9e-b667-259d4090ae94-logs\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.071695 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c773d1ce-8355-4e9e-b667-259d4090ae94-scripts\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.071765 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c773d1ce-8355-4e9e-b667-259d4090ae94-etc-machine-id\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.071981 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c773d1ce-8355-4e9e-b667-259d4090ae94-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.072033 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c773d1ce-8355-4e9e-b667-259d4090ae94-config-data\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.072072 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lfp4\" (UniqueName: \"kubernetes.io/projected/c773d1ce-8355-4e9e-b667-259d4090ae94-kube-api-access-4lfp4\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.133893 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.177104 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c773d1ce-8355-4e9e-b667-259d4090ae94-config-data\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.177457 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lfp4\" (UniqueName: \"kubernetes.io/projected/c773d1ce-8355-4e9e-b667-259d4090ae94-kube-api-access-4lfp4\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.177545 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c773d1ce-8355-4e9e-b667-259d4090ae94-config-data-custom\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.177631 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c773d1ce-8355-4e9e-b667-259d4090ae94-logs\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.177676 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c773d1ce-8355-4e9e-b667-259d4090ae94-scripts\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.177758 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c773d1ce-8355-4e9e-b667-259d4090ae94-etc-machine-id\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.177853 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c773d1ce-8355-4e9e-b667-259d4090ae94-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.181027 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c773d1ce-8355-4e9e-b667-259d4090ae94-etc-machine-id\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.198594 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c773d1ce-8355-4e9e-b667-259d4090ae94-logs\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.199939 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c773d1ce-8355-4e9e-b667-259d4090ae94-config-data\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " 
pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.200707 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c773d1ce-8355-4e9e-b667-259d4090ae94-config-data-custom\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.201049 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c773d1ce-8355-4e9e-b667-259d4090ae94-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.201610 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c773d1ce-8355-4e9e-b667-259d4090ae94-scripts\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.204678 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lfp4\" (UniqueName: \"kubernetes.io/projected/c773d1ce-8355-4e9e-b667-259d4090ae94-kube-api-access-4lfp4\") pod \"manila-api-0\" (UID: \"c773d1ce-8355-4e9e-b667-259d4090ae94\") " pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.361430 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/manila-api-0" Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.824323 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-scheduler-0"] Nov 21 15:49:03 crc kubenswrapper[4774]: I1121 15:49:03.917522 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-share-share1-0"] Nov 21 15:49:04 crc kubenswrapper[4774]: I1121 15:49:04.139679 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7ffc4959f5-pb5rf"] Nov 21 15:49:04 crc kubenswrapper[4774]: I1121 15:49:04.263883 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/manila-api-0"] Nov 21 15:49:04 crc kubenswrapper[4774]: I1121 15:49:04.353563 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099","Type":"ContainerStarted","Data":"fac898997469babe22b9b0878b096435889f186cf2144d5065e55943f5f2ae6d"} Nov 21 15:49:04 crc kubenswrapper[4774]: I1121 15:49:04.357019 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"c773d1ce-8355-4e9e-b667-259d4090ae94","Type":"ContainerStarted","Data":"9deeb89beb277a980a782b260695ca337cff5123a775c7d173291a62a687d459"} Nov 21 15:49:04 crc kubenswrapper[4774]: I1121 15:49:04.358116 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"7187f8c3-a88d-4b53-9f36-3d3aaa44a426","Type":"ContainerStarted","Data":"f7297da04fee1d30710aba0cba1266c8d54006fa7db8c170acc21ec5c19f9b81"} Nov 21 15:49:04 crc kubenswrapper[4774]: I1121 15:49:04.358952 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" event={"ID":"679926e0-e43b-4657-b63d-4ff6af9ed155","Type":"ContainerStarted","Data":"b5e88413aafbb9d664749e5f43a3287c3e1852cd35c2f8bff661613f49dfb4d3"} Nov 21 15:49:06 crc kubenswrapper[4774]: I1121 15:49:06.398713 4774 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/manila-api-0" event={"ID":"c773d1ce-8355-4e9e-b667-259d4090ae94","Type":"ContainerStarted","Data":"712ed6934d30c4a2585b7883ecda2296128d40fa93fd6a7802d8270358555055"} Nov 21 15:49:06 crc kubenswrapper[4774]: I1121 15:49:06.401685 4774 generic.go:334] "Generic (PLEG): container finished" podID="679926e0-e43b-4657-b63d-4ff6af9ed155" containerID="8675e79840e4507be66ff05a4e7fee318e98e14285f0d3a119e044593b4e4911" exitCode=0 Nov 21 15:49:06 crc kubenswrapper[4774]: I1121 15:49:06.401722 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" event={"ID":"679926e0-e43b-4657-b63d-4ff6af9ed155","Type":"ContainerDied","Data":"8675e79840e4507be66ff05a4e7fee318e98e14285f0d3a119e044593b4e4911"} Nov 21 15:49:07 crc kubenswrapper[4774]: I1121 15:49:07.440631 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099","Type":"ContainerStarted","Data":"24238be7bcce80ba5a67c167dda4a5bfcf5ebd58a50004fa87b5548797e51b00"} Nov 21 15:49:07 crc kubenswrapper[4774]: I1121 15:49:07.453694 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-api-0" event={"ID":"c773d1ce-8355-4e9e-b667-259d4090ae94","Type":"ContainerStarted","Data":"bb838e750d6f181d1137030e6e0760bb646669321b473e04d2d344528c1a48f2"} Nov 21 15:49:07 crc kubenswrapper[4774]: I1121 15:49:07.453933 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/manila-api-0" Nov 21 15:49:07 crc kubenswrapper[4774]: I1121 15:49:07.471883 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" event={"ID":"679926e0-e43b-4657-b63d-4ff6af9ed155","Type":"ContainerStarted","Data":"a4407449da6b487a8d8d14f62cd079f7e36906f3c8f6e840bf5dbf748acae367"} Nov 21 15:49:07 crc kubenswrapper[4774]: I1121 15:49:07.472175 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:07 crc kubenswrapper[4774]: I1121 15:49:07.489595 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-api-0" podStartSLOduration=5.489568426 podStartE2EDuration="5.489568426s" podCreationTimestamp="2025-11-21 15:49:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:49:07.478213292 +0000 UTC m=+6338.130412561" watchObservedRunningTime="2025-11-21 15:49:07.489568426 +0000 UTC m=+6338.141767695" Nov 21 15:49:07 crc kubenswrapper[4774]: I1121 15:49:07.517258 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" podStartSLOduration=5.517240085 podStartE2EDuration="5.517240085s" podCreationTimestamp="2025-11-21 15:49:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:49:07.499405616 +0000 UTC m=+6338.151604875" watchObservedRunningTime="2025-11-21 15:49:07.517240085 +0000 UTC m=+6338.169439354" Nov 21 15:49:07 crc kubenswrapper[4774]: I1121 15:49:07.923618 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:49:07 crc kubenswrapper[4774]: I1121 15:49:07.923906 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="ceilometer-central-agent" 
containerID="cri-o://10f2769edb6bff1efa8150cb1499cf56d794e1f63d857b5a733c5140d070f5ac" gracePeriod=30 Nov 21 15:49:07 crc kubenswrapper[4774]: I1121 15:49:07.924359 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="proxy-httpd" containerID="cri-o://ccf49c063658e616267ad153063371a5c619ac0c643368688175cb3e476aa00c" gracePeriod=30 Nov 21 15:49:07 crc kubenswrapper[4774]: I1121 15:49:07.924405 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="sg-core" containerID="cri-o://0eb665f9146b3fc46a3b8dc098286f69c26e6b62eed004d98eb3da56450dbbb1" gracePeriod=30 Nov 21 15:49:07 crc kubenswrapper[4774]: I1121 15:49:07.924437 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="ceilometer-notification-agent" containerID="cri-o://5bcb510b3f402253f6899262c01d3c0bd136c7177ef76053cab47f16e92642c1" gracePeriod=30 Nov 21 15:49:08 crc kubenswrapper[4774]: I1121 15:49:08.484568 4774 generic.go:334] "Generic (PLEG): container finished" podID="cf37bad3-5197-4496-996c-58807e46f313" containerID="ccf49c063658e616267ad153063371a5c619ac0c643368688175cb3e476aa00c" exitCode=0 Nov 21 15:49:08 crc kubenswrapper[4774]: I1121 15:49:08.484907 4774 generic.go:334] "Generic (PLEG): container finished" podID="cf37bad3-5197-4496-996c-58807e46f313" containerID="0eb665f9146b3fc46a3b8dc098286f69c26e6b62eed004d98eb3da56450dbbb1" exitCode=2 Nov 21 15:49:08 crc kubenswrapper[4774]: I1121 15:49:08.484813 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37bad3-5197-4496-996c-58807e46f313","Type":"ContainerDied","Data":"ccf49c063658e616267ad153063371a5c619ac0c643368688175cb3e476aa00c"} Nov 21 15:49:08 crc kubenswrapper[4774]: I1121 15:49:08.484960 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37bad3-5197-4496-996c-58807e46f313","Type":"ContainerDied","Data":"0eb665f9146b3fc46a3b8dc098286f69c26e6b62eed004d98eb3da56450dbbb1"} Nov 21 15:49:08 crc kubenswrapper[4774]: I1121 15:49:08.488182 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-scheduler-0" event={"ID":"fc220fc3-ac7d-4c67-8b6c-cb3f15e61099","Type":"ContainerStarted","Data":"d28a96ec42de8a4599911b8ce7bc727d4d3cc4e272228984a4cde985abe38f9c"} Nov 21 15:49:09 crc kubenswrapper[4774]: I1121 15:49:09.502045 4774 generic.go:334] "Generic (PLEG): container finished" podID="cf37bad3-5197-4496-996c-58807e46f313" containerID="10f2769edb6bff1efa8150cb1499cf56d794e1f63d857b5a733c5140d070f5ac" exitCode=0 Nov 21 15:49:09 crc kubenswrapper[4774]: I1121 15:49:09.502135 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37bad3-5197-4496-996c-58807e46f313","Type":"ContainerDied","Data":"10f2769edb6bff1efa8150cb1499cf56d794e1f63d857b5a733c5140d070f5ac"} Nov 21 15:49:09 crc kubenswrapper[4774]: I1121 15:49:09.529106 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-scheduler-0" podStartSLOduration=4.757516958 podStartE2EDuration="7.529087635s" podCreationTimestamp="2025-11-21 15:49:02 +0000 UTC" firstStartedPulling="2025-11-21 15:49:03.844751906 +0000 UTC m=+6334.496951165" lastFinishedPulling="2025-11-21 15:49:06.616322583 +0000 UTC 
m=+6337.268521842" observedRunningTime="2025-11-21 15:49:09.524423892 +0000 UTC m=+6340.176623151" watchObservedRunningTime="2025-11-21 15:49:09.529087635 +0000 UTC m=+6340.181286894" Nov 21 15:49:11 crc kubenswrapper[4774]: I1121 15:49:11.525554 4774 generic.go:334] "Generic (PLEG): container finished" podID="cf37bad3-5197-4496-996c-58807e46f313" containerID="5bcb510b3f402253f6899262c01d3c0bd136c7177ef76053cab47f16e92642c1" exitCode=0 Nov 21 15:49:11 crc kubenswrapper[4774]: I1121 15:49:11.525636 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37bad3-5197-4496-996c-58807e46f313","Type":"ContainerDied","Data":"5bcb510b3f402253f6899262c01d3c0bd136c7177ef76053cab47f16e92642c1"} Nov 21 15:49:13 crc kubenswrapper[4774]: I1121 15:49:13.056514 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-scheduler-0" Nov 21 15:49:13 crc kubenswrapper[4774]: I1121 15:49:13.136093 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:49:13 crc kubenswrapper[4774]: I1121 15:49:13.222104 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6574d9755f-vzbzr"] Nov 21 15:49:13 crc kubenswrapper[4774]: I1121 15:49:13.222617 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" podUID="b3a600c2-6589-4fc3-a96a-2211c34f0c68" containerName="dnsmasq-dns" containerID="cri-o://ab84a8f0e3bf4ddc97f72d6157665cbe2c843e88203e5f9ac91f8776afa67afc" gracePeriod=10 Nov 21 15:49:13 crc kubenswrapper[4774]: I1121 15:49:13.845931 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" podUID="b3a600c2-6589-4fc3-a96a-2211c34f0c68" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.1.78:5353: connect: connection refused" Nov 21 15:49:15 crc kubenswrapper[4774]: I1121 15:49:15.568493 4774 generic.go:334] "Generic (PLEG): container finished" podID="b3a600c2-6589-4fc3-a96a-2211c34f0c68" containerID="ab84a8f0e3bf4ddc97f72d6157665cbe2c843e88203e5f9ac91f8776afa67afc" exitCode=0 Nov 21 15:49:15 crc kubenswrapper[4774]: I1121 15:49:15.568545 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" event={"ID":"b3a600c2-6589-4fc3-a96a-2211c34f0c68","Type":"ContainerDied","Data":"ab84a8f0e3bf4ddc97f72d6157665cbe2c843e88203e5f9ac91f8776afa67afc"} Nov 21 15:49:15 crc kubenswrapper[4774]: I1121 15:49:15.906421 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Nov 21 15:49:15 crc kubenswrapper[4774]: I1121 15:49:15.997082 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37bad3-5197-4496-996c-58807e46f313-log-httpd\") pod \"cf37bad3-5197-4496-996c-58807e46f313\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " Nov 21 15:49:15 crc kubenswrapper[4774]: I1121 15:49:15.997270 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-combined-ca-bundle\") pod \"cf37bad3-5197-4496-996c-58807e46f313\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " Nov 21 15:49:15 crc kubenswrapper[4774]: I1121 15:49:15.997362 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-scripts\") pod \"cf37bad3-5197-4496-996c-58807e46f313\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " Nov 21 15:49:15 crc kubenswrapper[4774]: I1121 15:49:15.997402 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4v698\" (UniqueName: \"kubernetes.io/projected/cf37bad3-5197-4496-996c-58807e46f313-kube-api-access-4v698\") pod \"cf37bad3-5197-4496-996c-58807e46f313\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " Nov 21 15:49:15 crc kubenswrapper[4774]: I1121 15:49:15.997426 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-config-data\") pod \"cf37bad3-5197-4496-996c-58807e46f313\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " Nov 21 15:49:15 crc kubenswrapper[4774]: I1121 15:49:15.997561 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37bad3-5197-4496-996c-58807e46f313-run-httpd\") pod \"cf37bad3-5197-4496-996c-58807e46f313\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " Nov 21 15:49:15 crc kubenswrapper[4774]: I1121 15:49:15.997598 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-sg-core-conf-yaml\") pod \"cf37bad3-5197-4496-996c-58807e46f313\" (UID: \"cf37bad3-5197-4496-996c-58807e46f313\") " Nov 21 15:49:15 crc kubenswrapper[4774]: I1121 15:49:15.998261 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf37bad3-5197-4496-996c-58807e46f313-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cf37bad3-5197-4496-996c-58807e46f313" (UID: "cf37bad3-5197-4496-996c-58807e46f313"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:49:15 crc kubenswrapper[4774]: I1121 15:49:15.998322 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf37bad3-5197-4496-996c-58807e46f313-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cf37bad3-5197-4496-996c-58807e46f313" (UID: "cf37bad3-5197-4496-996c-58807e46f313"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.005406 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-scripts" (OuterVolumeSpecName: "scripts") pod "cf37bad3-5197-4496-996c-58807e46f313" (UID: "cf37bad3-5197-4496-996c-58807e46f313"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.007236 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf37bad3-5197-4496-996c-58807e46f313-kube-api-access-4v698" (OuterVolumeSpecName: "kube-api-access-4v698") pod "cf37bad3-5197-4496-996c-58807e46f313" (UID: "cf37bad3-5197-4496-996c-58807e46f313"). InnerVolumeSpecName "kube-api-access-4v698". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.057978 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cf37bad3-5197-4496-996c-58807e46f313" (UID: "cf37bad3-5197-4496-996c-58807e46f313"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.080939 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf37bad3-5197-4496-996c-58807e46f313" (UID: "cf37bad3-5197-4496-996c-58807e46f313"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.099607 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37bad3-5197-4496-996c-58807e46f313-run-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.099647 4774 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.099660 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cf37bad3-5197-4496-996c-58807e46f313-log-httpd\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.099674 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.099685 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-scripts\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.099693 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4v698\" (UniqueName: \"kubernetes.io/projected/cf37bad3-5197-4496-996c-58807e46f313-kube-api-access-4v698\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.111739 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-config-data" (OuterVolumeSpecName: "config-data") pod "cf37bad3-5197-4496-996c-58807e46f313" (UID: "cf37bad3-5197-4496-996c-58807e46f313"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.201947 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf37bad3-5197-4496-996c-58807e46f313-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.485598 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.611371 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.611455 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-dns-svc\") pod \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.611351 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6574d9755f-vzbzr" event={"ID":"b3a600c2-6589-4fc3-a96a-2211c34f0c68","Type":"ContainerDied","Data":"a166851c1733d8ed2e221c4236ce331dd8b03a7afda5747d34c5b5143e7e6619"} Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.611601 4774 scope.go:117] "RemoveContainer" containerID="ab84a8f0e3bf4ddc97f72d6157665cbe2c843e88203e5f9ac91f8776afa67afc" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.611641 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhpqt\" (UniqueName: \"kubernetes.io/projected/b3a600c2-6589-4fc3-a96a-2211c34f0c68-kube-api-access-rhpqt\") pod \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.611749 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-ovsdbserver-sb\") pod \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.611803 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-config\") pod \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.611889 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-ovsdbserver-nb\") pod \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\" (UID: \"b3a600c2-6589-4fc3-a96a-2211c34f0c68\") " Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.619038 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3a600c2-6589-4fc3-a96a-2211c34f0c68-kube-api-access-rhpqt" (OuterVolumeSpecName: "kube-api-access-rhpqt") pod "b3a600c2-6589-4fc3-a96a-2211c34f0c68" (UID: "b3a600c2-6589-4fc3-a96a-2211c34f0c68"). 
InnerVolumeSpecName "kube-api-access-rhpqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.619125 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cf37bad3-5197-4496-996c-58807e46f313","Type":"ContainerDied","Data":"14743217648f7abe3134b31da8693755ca5354d0a663d40d850fe12622af9aa6"} Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.619234 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.659208 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.667402 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.681618 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b3a600c2-6589-4fc3-a96a-2211c34f0c68" (UID: "b3a600c2-6589-4fc3-a96a-2211c34f0c68"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.688305 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:49:16 crc kubenswrapper[4774]: E1121 15:49:16.688758 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="proxy-httpd" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.688778 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="proxy-httpd" Nov 21 15:49:16 crc kubenswrapper[4774]: E1121 15:49:16.688796 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="ceilometer-notification-agent" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.688805 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="ceilometer-notification-agent" Nov 21 15:49:16 crc kubenswrapper[4774]: E1121 15:49:16.688851 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="ceilometer-central-agent" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.688860 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="ceilometer-central-agent" Nov 21 15:49:16 crc kubenswrapper[4774]: E1121 15:49:16.688884 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3a600c2-6589-4fc3-a96a-2211c34f0c68" containerName="init" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.688891 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3a600c2-6589-4fc3-a96a-2211c34f0c68" containerName="init" Nov 21 15:49:16 crc kubenswrapper[4774]: E1121 15:49:16.688926 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="sg-core" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.688934 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="sg-core" Nov 21 15:49:16 crc kubenswrapper[4774]: E1121 15:49:16.688949 4774 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="b3a600c2-6589-4fc3-a96a-2211c34f0c68" containerName="dnsmasq-dns" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.688955 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3a600c2-6589-4fc3-a96a-2211c34f0c68" containerName="dnsmasq-dns" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.689153 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3a600c2-6589-4fc3-a96a-2211c34f0c68" containerName="dnsmasq-dns" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.689179 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="proxy-httpd" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.689198 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="ceilometer-notification-agent" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.689215 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="ceilometer-central-agent" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.689228 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf37bad3-5197-4496-996c-58807e46f313" containerName="sg-core" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.690231 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b3a600c2-6589-4fc3-a96a-2211c34f0c68" (UID: "b3a600c2-6589-4fc3-a96a-2211c34f0c68"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.691477 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.695224 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.695845 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-config" (OuterVolumeSpecName: "config") pod "b3a600c2-6589-4fc3-a96a-2211c34f0c68" (UID: "b3a600c2-6589-4fc3-a96a-2211c34f0c68"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.696216 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.717036 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-config-data\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.717176 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/885ff8dc-a71c-41d2-8183-6cbfe81973ea-run-httpd\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.717199 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/885ff8dc-a71c-41d2-8183-6cbfe81973ea-log-httpd\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.717383 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.717449 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkdfh\" (UniqueName: \"kubernetes.io/projected/885ff8dc-a71c-41d2-8183-6cbfe81973ea-kube-api-access-bkdfh\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.717477 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.717720 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-scripts\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.717848 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhpqt\" (UniqueName: \"kubernetes.io/projected/b3a600c2-6589-4fc3-a96a-2211c34f0c68-kube-api-access-rhpqt\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.717868 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.717880 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.717891 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.718495 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.721460 4774 scope.go:117] "RemoveContainer" containerID="ca6ed49cafb923ea60a77da9a1d906fa2b68a9bf1c9ed47c71a243af72f36628" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.730981 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b3a600c2-6589-4fc3-a96a-2211c34f0c68" (UID: "b3a600c2-6589-4fc3-a96a-2211c34f0c68"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.819611 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/885ff8dc-a71c-41d2-8183-6cbfe81973ea-run-httpd\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.819648 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/885ff8dc-a71c-41d2-8183-6cbfe81973ea-log-httpd\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.819741 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.819789 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkdfh\" (UniqueName: \"kubernetes.io/projected/885ff8dc-a71c-41d2-8183-6cbfe81973ea-kube-api-access-bkdfh\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.819807 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.819912 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-scripts\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.819961 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-config-data\") pod \"ceilometer-0\" (UID: 
\"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.820015 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b3a600c2-6589-4fc3-a96a-2211c34f0c68-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.821065 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/885ff8dc-a71c-41d2-8183-6cbfe81973ea-run-httpd\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.821246 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/885ff8dc-a71c-41d2-8183-6cbfe81973ea-log-httpd\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.823931 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.825374 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.825380 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-scripts\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.825791 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-config-data\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.839756 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkdfh\" (UniqueName: \"kubernetes.io/projected/885ff8dc-a71c-41d2-8183-6cbfe81973ea-kube-api-access-bkdfh\") pod \"ceilometer-0\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") " pod="openstack/ceilometer-0" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.859852 4774 scope.go:117] "RemoveContainer" containerID="ccf49c063658e616267ad153063371a5c619ac0c643368688175cb3e476aa00c" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.921597 4774 scope.go:117] "RemoveContainer" containerID="0eb665f9146b3fc46a3b8dc098286f69c26e6b62eed004d98eb3da56450dbbb1" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.947379 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6574d9755f-vzbzr"] Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.949924 4774 scope.go:117] "RemoveContainer" containerID="5bcb510b3f402253f6899262c01d3c0bd136c7177ef76053cab47f16e92642c1" Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.955687 4774 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6574d9755f-vzbzr"] Nov 21 15:49:16 crc kubenswrapper[4774]: I1121 15:49:16.978397 4774 scope.go:117] "RemoveContainer" containerID="10f2769edb6bff1efa8150cb1499cf56d794e1f63d857b5a733c5140d070f5ac" Nov 21 15:49:17 crc kubenswrapper[4774]: I1121 15:49:17.010392 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Nov 21 15:49:17 crc kubenswrapper[4774]: I1121 15:49:17.576327 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:49:17 crc kubenswrapper[4774]: I1121 15:49:17.630629 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"885ff8dc-a71c-41d2-8183-6cbfe81973ea","Type":"ContainerStarted","Data":"db7de8ffbf6fa23fdeb1d3faed049501246720e21c43d1bd43b32ba432a3ab42"} Nov 21 15:49:18 crc kubenswrapper[4774]: I1121 15:49:18.108757 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3a600c2-6589-4fc3-a96a-2211c34f0c68" path="/var/lib/kubelet/pods/b3a600c2-6589-4fc3-a96a-2211c34f0c68/volumes" Nov 21 15:49:18 crc kubenswrapper[4774]: I1121 15:49:18.109729 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf37bad3-5197-4496-996c-58807e46f313" path="/var/lib/kubelet/pods/cf37bad3-5197-4496-996c-58807e46f313/volumes" Nov 21 15:49:18 crc kubenswrapper[4774]: I1121 15:49:18.902722 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Nov 21 15:49:23 crc kubenswrapper[4774]: I1121 15:49:23.700751 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"885ff8dc-a71c-41d2-8183-6cbfe81973ea","Type":"ContainerStarted","Data":"a7f02202f7838a0767f54ea98b3c1bc41f7eff08c041412424d7bbc8042ef9cf"} Nov 21 15:49:24 crc kubenswrapper[4774]: I1121 15:49:24.726779 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"7187f8c3-a88d-4b53-9f36-3d3aaa44a426","Type":"ContainerStarted","Data":"1ff692b33339878f74365c1d5a1990800c2a944a3c3bcaeedb9154e343190567"} Nov 21 15:49:24 crc kubenswrapper[4774]: I1121 15:49:24.814591 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-scheduler-0" Nov 21 15:49:24 crc kubenswrapper[4774]: I1121 15:49:24.827040 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/manila-api-0" Nov 21 15:49:25 crc kubenswrapper[4774]: I1121 15:49:25.739176 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/manila-share-share1-0" event={"ID":"7187f8c3-a88d-4b53-9f36-3d3aaa44a426","Type":"ContainerStarted","Data":"374bf128829fff842facee27625884507686629efade91a3ca6ff5c62ad99fef"} Nov 21 15:49:25 crc kubenswrapper[4774]: I1121 15:49:25.771382 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/manila-share-share1-0" podStartSLOduration=4.453450441 podStartE2EDuration="23.771359913s" podCreationTimestamp="2025-11-21 15:49:02 +0000 UTC" firstStartedPulling="2025-11-21 15:49:03.975593645 +0000 UTC m=+6334.627792904" lastFinishedPulling="2025-11-21 15:49:23.293503117 +0000 UTC m=+6353.945702376" observedRunningTime="2025-11-21 15:49:25.758746063 +0000 UTC m=+6356.410945332" watchObservedRunningTime="2025-11-21 15:49:25.771359913 +0000 UTC m=+6356.423559172" Nov 21 15:49:26 crc kubenswrapper[4774]: I1121 15:49:26.758494 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"885ff8dc-a71c-41d2-8183-6cbfe81973ea","Type":"ContainerStarted","Data":"3f8e1391c07844a1c4fd1e9a49b6bc2e2798501ee5bec0f9792fcc5fee8b9c16"} Nov 21 15:49:28 crc kubenswrapper[4774]: I1121 15:49:28.787965 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"885ff8dc-a71c-41d2-8183-6cbfe81973ea","Type":"ContainerStarted","Data":"5b8a0972b0db6bf15c8610c5f2ebfa8bffa401a99320fd12fdb9edcea6806e7b"} Nov 21 15:49:29 crc kubenswrapper[4774]: I1121 15:49:29.600597 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:49:29 crc kubenswrapper[4774]: I1121 15:49:29.600673 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:49:29 crc kubenswrapper[4774]: I1121 15:49:29.600720 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 15:49:29 crc kubenswrapper[4774]: I1121 15:49:29.601394 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"af288035ac19f9f50a6dc5ab6216f56c123497fcf2d36d36aec3fddf5ed00acc"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 15:49:29 crc kubenswrapper[4774]: I1121 15:49:29.601464 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://af288035ac19f9f50a6dc5ab6216f56c123497fcf2d36d36aec3fddf5ed00acc" gracePeriod=600 Nov 21 15:49:29 crc kubenswrapper[4774]: I1121 15:49:29.801694 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="af288035ac19f9f50a6dc5ab6216f56c123497fcf2d36d36aec3fddf5ed00acc" exitCode=0 Nov 21 15:49:29 crc kubenswrapper[4774]: I1121 15:49:29.801749 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"af288035ac19f9f50a6dc5ab6216f56c123497fcf2d36d36aec3fddf5ed00acc"} Nov 21 15:49:29 crc kubenswrapper[4774]: I1121 15:49:29.801847 4774 scope.go:117] "RemoveContainer" containerID="6c66c6746d2051300120112c5c86095866ce2648592ffc395135049d44df1f70" Nov 21 15:49:30 crc kubenswrapper[4774]: I1121 15:49:30.814626 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268"} Nov 21 15:49:32 crc kubenswrapper[4774]: I1121 15:49:32.842060 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"885ff8dc-a71c-41d2-8183-6cbfe81973ea","Type":"ContainerStarted","Data":"a45a4913c485dfd954145acb34b0c835482ea8ad8124dd732f2aa410d6dff365"} Nov 21 15:49:32 crc kubenswrapper[4774]: I1121 15:49:32.842745 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 21 15:49:32 crc kubenswrapper[4774]: I1121 15:49:32.842455 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="proxy-httpd" containerID="cri-o://a45a4913c485dfd954145acb34b0c835482ea8ad8124dd732f2aa410d6dff365" gracePeriod=30 Nov 21 15:49:32 crc kubenswrapper[4774]: I1121 15:49:32.842471 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="sg-core" containerID="cri-o://5b8a0972b0db6bf15c8610c5f2ebfa8bffa401a99320fd12fdb9edcea6806e7b" gracePeriod=30 Nov 21 15:49:32 crc kubenswrapper[4774]: I1121 15:49:32.842483 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="ceilometer-notification-agent" containerID="cri-o://3f8e1391c07844a1c4fd1e9a49b6bc2e2798501ee5bec0f9792fcc5fee8b9c16" gracePeriod=30 Nov 21 15:49:32 crc kubenswrapper[4774]: I1121 15:49:32.842202 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="ceilometer-central-agent" containerID="cri-o://a7f02202f7838a0767f54ea98b3c1bc41f7eff08c041412424d7bbc8042ef9cf" gracePeriod=30 Nov 21 15:49:32 crc kubenswrapper[4774]: I1121 15:49:32.872622 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.113976215 podStartE2EDuration="16.872602603s" podCreationTimestamp="2025-11-21 15:49:16 +0000 UTC" firstStartedPulling="2025-11-21 15:49:17.591261208 +0000 UTC m=+6348.243460467" lastFinishedPulling="2025-11-21 15:49:31.349887596 +0000 UTC m=+6362.002086855" observedRunningTime="2025-11-21 15:49:32.866629913 +0000 UTC m=+6363.518829182" watchObservedRunningTime="2025-11-21 15:49:32.872602603 +0000 UTC m=+6363.524801872" Nov 21 15:49:33 crc kubenswrapper[4774]: I1121 15:49:33.044901 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/manila-share-share1-0" Nov 21 15:49:33 crc kubenswrapper[4774]: I1121 15:49:33.853684 4774 generic.go:334] "Generic (PLEG): container finished" podID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerID="a45a4913c485dfd954145acb34b0c835482ea8ad8124dd732f2aa410d6dff365" exitCode=0 Nov 21 15:49:33 crc kubenswrapper[4774]: I1121 15:49:33.854003 4774 generic.go:334] "Generic (PLEG): container finished" podID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerID="5b8a0972b0db6bf15c8610c5f2ebfa8bffa401a99320fd12fdb9edcea6806e7b" exitCode=2 Nov 21 15:49:33 crc kubenswrapper[4774]: I1121 15:49:33.854017 4774 generic.go:334] "Generic (PLEG): container finished" podID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerID="3f8e1391c07844a1c4fd1e9a49b6bc2e2798501ee5bec0f9792fcc5fee8b9c16" exitCode=0 Nov 21 15:49:33 crc kubenswrapper[4774]: I1121 15:49:33.854027 4774 generic.go:334] "Generic (PLEG): container finished" podID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerID="a7f02202f7838a0767f54ea98b3c1bc41f7eff08c041412424d7bbc8042ef9cf" exitCode=0 Nov 21 15:49:33 crc 
Nov 21 15:49:33 crc kubenswrapper[4774]: I1121 15:49:33.853731 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"885ff8dc-a71c-41d2-8183-6cbfe81973ea","Type":"ContainerDied","Data":"a45a4913c485dfd954145acb34b0c835482ea8ad8124dd732f2aa410d6dff365"}
Nov 21 15:49:33 crc kubenswrapper[4774]: I1121 15:49:33.854056 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"885ff8dc-a71c-41d2-8183-6cbfe81973ea","Type":"ContainerDied","Data":"5b8a0972b0db6bf15c8610c5f2ebfa8bffa401a99320fd12fdb9edcea6806e7b"}
Nov 21 15:49:33 crc kubenswrapper[4774]: I1121 15:49:33.854068 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"885ff8dc-a71c-41d2-8183-6cbfe81973ea","Type":"ContainerDied","Data":"3f8e1391c07844a1c4fd1e9a49b6bc2e2798501ee5bec0f9792fcc5fee8b9c16"}
Nov 21 15:49:33 crc kubenswrapper[4774]: I1121 15:49:33.854078 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"885ff8dc-a71c-41d2-8183-6cbfe81973ea","Type":"ContainerDied","Data":"a7f02202f7838a0767f54ea98b3c1bc41f7eff08c041412424d7bbc8042ef9cf"}
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.242135 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.304307 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-config-data\") pod \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") "
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.305027 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/885ff8dc-a71c-41d2-8183-6cbfe81973ea-log-httpd\") pod \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") "
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.305088 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkdfh\" (UniqueName: \"kubernetes.io/projected/885ff8dc-a71c-41d2-8183-6cbfe81973ea-kube-api-access-bkdfh\") pod \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") "
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.305179 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-scripts\") pod \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") "
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.305248 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-combined-ca-bundle\") pod \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") "
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.305292 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-sg-core-conf-yaml\") pod \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") "
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.305345 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/885ff8dc-a71c-41d2-8183-6cbfe81973ea-run-httpd\") pod \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\" (UID: \"885ff8dc-a71c-41d2-8183-6cbfe81973ea\") "
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.307365 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/885ff8dc-a71c-41d2-8183-6cbfe81973ea-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "885ff8dc-a71c-41d2-8183-6cbfe81973ea" (UID: "885ff8dc-a71c-41d2-8183-6cbfe81973ea"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.310925 4774 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/885ff8dc-a71c-41d2-8183-6cbfe81973ea-log-httpd\") on node \"crc\" DevicePath \"\""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.313914 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/885ff8dc-a71c-41d2-8183-6cbfe81973ea-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "885ff8dc-a71c-41d2-8183-6cbfe81973ea" (UID: "885ff8dc-a71c-41d2-8183-6cbfe81973ea"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.315058 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-scripts" (OuterVolumeSpecName: "scripts") pod "885ff8dc-a71c-41d2-8183-6cbfe81973ea" (UID: "885ff8dc-a71c-41d2-8183-6cbfe81973ea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.315096 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/885ff8dc-a71c-41d2-8183-6cbfe81973ea-kube-api-access-bkdfh" (OuterVolumeSpecName: "kube-api-access-bkdfh") pod "885ff8dc-a71c-41d2-8183-6cbfe81973ea" (UID: "885ff8dc-a71c-41d2-8183-6cbfe81973ea"). InnerVolumeSpecName "kube-api-access-bkdfh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.357977 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "885ff8dc-a71c-41d2-8183-6cbfe81973ea" (UID: "885ff8dc-a71c-41d2-8183-6cbfe81973ea"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.411787 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "885ff8dc-a71c-41d2-8183-6cbfe81973ea" (UID: "885ff8dc-a71c-41d2-8183-6cbfe81973ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.415525 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkdfh\" (UniqueName: \"kubernetes.io/projected/885ff8dc-a71c-41d2-8183-6cbfe81973ea-kube-api-access-bkdfh\") on node \"crc\" DevicePath \"\""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.415554 4774 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-scripts\") on node \"crc\" DevicePath \"\""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.415563 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.415571 4774 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.415580 4774 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/885ff8dc-a71c-41d2-8183-6cbfe81973ea-run-httpd\") on node \"crc\" DevicePath \"\""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.455706 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-config-data" (OuterVolumeSpecName: "config-data") pod "885ff8dc-a71c-41d2-8183-6cbfe81973ea" (UID: "885ff8dc-a71c-41d2-8183-6cbfe81973ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.517891 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/885ff8dc-a71c-41d2-8183-6cbfe81973ea-config-data\") on node \"crc\" DevicePath \"\""
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.740878 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/manila-share-share1-0"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.864797 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"885ff8dc-a71c-41d2-8183-6cbfe81973ea","Type":"ContainerDied","Data":"db7de8ffbf6fa23fdeb1d3faed049501246720e21c43d1bd43b32ba432a3ab42"}
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.865054 4774 scope.go:117] "RemoveContainer" containerID="a45a4913c485dfd954145acb34b0c835482ea8ad8124dd732f2aa410d6dff365"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.865297 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.893547 4774 scope.go:117] "RemoveContainer" containerID="5b8a0972b0db6bf15c8610c5f2ebfa8bffa401a99320fd12fdb9edcea6806e7b"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.915029 4774 scope.go:117] "RemoveContainer" containerID="3f8e1391c07844a1c4fd1e9a49b6bc2e2798501ee5bec0f9792fcc5fee8b9c16"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.915801 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.933246 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.943092 4774 scope.go:117] "RemoveContainer" containerID="a7f02202f7838a0767f54ea98b3c1bc41f7eff08c041412424d7bbc8042ef9cf"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.981793 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Nov 21 15:49:34 crc kubenswrapper[4774]: E1121 15:49:34.983062 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="ceilometer-notification-agent"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.983083 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="ceilometer-notification-agent"
Nov 21 15:49:34 crc kubenswrapper[4774]: E1121 15:49:34.983105 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="proxy-httpd"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.983111 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="proxy-httpd"
Nov 21 15:49:34 crc kubenswrapper[4774]: E1121 15:49:34.983144 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="sg-core"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.983150 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="sg-core"
Nov 21 15:49:34 crc kubenswrapper[4774]: E1121 15:49:34.983195 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="ceilometer-central-agent"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.983201 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="ceilometer-central-agent"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.983719 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="ceilometer-central-agent"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.983742 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="proxy-httpd"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.983769 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="sg-core"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.983804 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" containerName="ceilometer-notification-agent"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.990175 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.994739 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.995151 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Nov 21 15:49:34 crc kubenswrapper[4774]: I1121 15:49:34.998399 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.028579 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-scripts\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.028660 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-config-data\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.028699 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.028719 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvt2s\" (UniqueName: \"kubernetes.io/projected/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-kube-api-access-zvt2s\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.028742 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-log-httpd\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.028813 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.028872 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-run-httpd\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.130484 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-log-httpd\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.130628 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.130694 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-run-httpd\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.130763 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-scripts\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.131041 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-config-data\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.131089 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.131109 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvt2s\" (UniqueName: \"kubernetes.io/projected/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-kube-api-access-zvt2s\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.131258 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-log-httpd\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.132505 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-run-httpd\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.137246 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.138210 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-scripts\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.139756 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-config-data\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.140348 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.154302 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvt2s\" (UniqueName: \"kubernetes.io/projected/ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b-kube-api-access-zvt2s\") pod \"ceilometer-0\" (UID: \"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b\") " pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.327432 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Nov 21 15:49:35 crc kubenswrapper[4774]: I1121 15:49:35.903995 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Nov 21 15:49:35 crc kubenswrapper[4774]: W1121 15:49:35.908373 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee6332a4_c9aa_46fa_9fd1_e7c1a0a4db1b.slice/crio-ad1e3a90389341960643f2d0ef5916be6ee47605337bb64e6f467237ac56e66d WatchSource:0}: Error finding container ad1e3a90389341960643f2d0ef5916be6ee47605337bb64e6f467237ac56e66d: Status 404 returned error can't find the container with id ad1e3a90389341960643f2d0ef5916be6ee47605337bb64e6f467237ac56e66d
Nov 21 15:49:36 crc kubenswrapper[4774]: I1121 15:49:36.106414 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="885ff8dc-a71c-41d2-8183-6cbfe81973ea" path="/var/lib/kubelet/pods/885ff8dc-a71c-41d2-8183-6cbfe81973ea/volumes"
Nov 21 15:49:36 crc kubenswrapper[4774]: I1121 15:49:36.883929 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b","Type":"ContainerStarted","Data":"ad1e3a90389341960643f2d0ef5916be6ee47605337bb64e6f467237ac56e66d"}
Nov 21 15:49:37 crc kubenswrapper[4774]: I1121 15:49:37.895627 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b","Type":"ContainerStarted","Data":"99cd7e4b0ab3069082a50bce452130ec7750250374f6dfbfa96706f624e17422"}
Nov 21 15:49:38 crc kubenswrapper[4774]: I1121 15:49:38.909879 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b","Type":"ContainerStarted","Data":"1e68ec4bf3a4e59a497703c7d94087f114aad60bfa83ada87a3725d18a1ee6f3"}
Nov 21 15:49:39 crc kubenswrapper[4774]: I1121 15:49:39.919418 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b","Type":"ContainerStarted","Data":"48b44de7ca82b9b4c7f62d402424040b1edf608e10acb9a037eeadb7d2d1402d"}
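For the replacement ceilometer-0 pod (UID ee6332a4-...) the first ContainerStarted Data above is the pod sandbox (ad1e3a90...) and the subsequent IDs are the containers themselves, so a healthy start for this four-container pod shows five IDs in total. A throwaway tally along those lines, with the same caveat about log-format stability:

    import re, sys
    from collections import Counter

    evt = re.compile(r'pod="([^"]+)" event=(\{.*?\})')
    starts = Counter()
    for line in sys.stdin:
        m = evt.search(line)
        if m and '"ContainerStarted"' in m.group(2):
            starts[m.group(1)] += 1

    for pod, n in starts.most_common():
        print(f"{pod}: {n} ContainerStarted events (incl. sandbox)")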
event={"ID":"ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b","Type":"ContainerStarted","Data":"fb8400af67efa0dfbb250f4a0e8cab14390e2a6635a4e02d7cd64da091097601"} Nov 21 15:49:41 crc kubenswrapper[4774]: I1121 15:49:41.944450 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Nov 21 15:50:05 crc kubenswrapper[4774]: I1121 15:50:05.342114 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Nov 21 15:50:05 crc kubenswrapper[4774]: I1121 15:50:05.373744 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=26.197512847 podStartE2EDuration="31.373722371s" podCreationTimestamp="2025-11-21 15:49:34 +0000 UTC" firstStartedPulling="2025-11-21 15:49:35.910894404 +0000 UTC m=+6366.563093663" lastFinishedPulling="2025-11-21 15:49:41.087103928 +0000 UTC m=+6371.739303187" observedRunningTime="2025-11-21 15:49:41.972068875 +0000 UTC m=+6372.624268134" watchObservedRunningTime="2025-11-21 15:50:05.373722371 +0000 UTC m=+6396.025921630" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.198215 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-dc8bc85cc-8g2nb"] Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.206365 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.208278 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.224975 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dc8bc85cc-8g2nb"] Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.353503 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-ovsdbserver-sb\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.353566 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrh42\" (UniqueName: \"kubernetes.io/projected/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-kube-api-access-jrh42\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.353615 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-ovsdbserver-nb\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.353723 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-openstack-cell1\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.353751 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-config\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.353801 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-dns-svc\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.455670 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-openstack-cell1\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.456064 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-config\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.456143 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-dns-svc\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.456201 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-ovsdbserver-sb\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.456230 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrh42\" (UniqueName: \"kubernetes.io/projected/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-kube-api-access-jrh42\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.456272 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-ovsdbserver-nb\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.457122 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-ovsdbserver-nb\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.457681 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: 
\"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-openstack-cell1\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.458533 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-config\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.458863 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-dns-svc\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.459004 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-ovsdbserver-sb\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.481507 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrh42\" (UniqueName: \"kubernetes.io/projected/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-kube-api-access-jrh42\") pod \"dnsmasq-dns-dc8bc85cc-8g2nb\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.532400 4774 util.go:30] "No sandbox for pod can be found. 
Nov 21 15:50:21 crc kubenswrapper[4774]: I1121 15:50:21.532400 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb"
Nov 21 15:50:22 crc kubenswrapper[4774]: I1121 15:50:22.112443 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dc8bc85cc-8g2nb"]
Nov 21 15:50:22 crc kubenswrapper[4774]: I1121 15:50:22.325794 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" event={"ID":"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec","Type":"ContainerStarted","Data":"65e9a69ca86c21b0e7b87f9d9086b0e8551856f22b60ac621a1e8e1e61f83843"}
Nov 21 15:50:22 crc kubenswrapper[4774]: E1121 15:50:22.597933 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd65e6dc_9cad_4cff_9510_2d9b2faf5bec.slice/crio-01c115281f1f13197b1ea31326f15e77050df356d19c4c4d0017132236b50d8d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd65e6dc_9cad_4cff_9510_2d9b2faf5bec.slice/crio-conmon-01c115281f1f13197b1ea31326f15e77050df356d19c4c4d0017132236b50d8d.scope\": RecentStats: unable to find data in memory cache]"
Nov 21 15:50:23 crc kubenswrapper[4774]: I1121 15:50:23.336472 4774 generic.go:334] "Generic (PLEG): container finished" podID="fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" containerID="01c115281f1f13197b1ea31326f15e77050df356d19c4c4d0017132236b50d8d" exitCode=0
Nov 21 15:50:23 crc kubenswrapper[4774]: I1121 15:50:23.336774 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" event={"ID":"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec","Type":"ContainerDied","Data":"01c115281f1f13197b1ea31326f15e77050df356d19c4c4d0017132236b50d8d"}
Nov 21 15:50:24 crc kubenswrapper[4774]: I1121 15:50:24.348218 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" event={"ID":"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec","Type":"ContainerStarted","Data":"57a43c5933a3772f2f642fc5924334911a278098d25159c483c79a52c12a0ef5"}
Nov 21 15:50:24 crc kubenswrapper[4774]: I1121 15:50:24.349699 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb"
Nov 21 15:50:24 crc kubenswrapper[4774]: I1121 15:50:24.371459 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" podStartSLOduration=3.371439294 podStartE2EDuration="3.371439294s" podCreationTimestamp="2025-11-21 15:50:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:50:24.369867409 +0000 UTC m=+6415.022066688" watchObservedRunningTime="2025-11-21 15:50:24.371439294 +0000 UTC m=+6415.023638553"
Nov 21 15:50:31 crc kubenswrapper[4774]: I1121 15:50:31.534543 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb"
Nov 21 15:50:31 crc kubenswrapper[4774]: I1121 15:50:31.601041 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ffc4959f5-pb5rf"]
Nov 21 15:50:31 crc kubenswrapper[4774]: I1121 15:50:31.601283 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" podUID="679926e0-e43b-4657-b63d-4ff6af9ed155" containerName="dnsmasq-dns" containerID="cri-o://a4407449da6b487a8d8d14f62cd079f7e36906f3c8f6e840bf5dbf748acae367" gracePeriod=10
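Note the firstStartedPulling/lastFinishedPulling values of 0001-01-01 00:00:00 +0000 UTC in the latency entry above: that is Go's zero time.Time, meaning no image pull happened for this pod (its images were already present), which is also why podStartSLOduration equals podStartE2EDuration here, unlike the manila and ceilometer entries earlier. A tiny guard for that sentinel when post-processing these numbers:

    # Go's zero time.Time prints as this sentinel; kubelet records it when a pod
    # never pulled an image, so the pull window is undefined rather than zero-length.
    GO_ZERO = "0001-01-01 00:00:00 +0000 UTC"

    def pulled_images(first_started_pulling: str, last_finished_pulling: str) -> bool:
        return not (first_started_pulling.startswith("0001-01-01")
                    or last_finished_pulling.startswith("0001-01-01"))

    print(pulled_images(GO_ZERO, GO_ZERO))  # False for dnsmasq-dns-dc8bc85cc-8g2nb above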
Nov 21 15:50:31 crc kubenswrapper[4774]: I1121 15:50:31.766888 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fb67cb889-62d6d"]
Nov 21 15:50:31 crc kubenswrapper[4774]: I1121 15:50:31.768980 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:31 crc kubenswrapper[4774]: I1121 15:50:31.784683 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fb67cb889-62d6d"]
Nov 21 15:50:31 crc kubenswrapper[4774]: I1121 15:50:31.912322 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-ovsdbserver-sb\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:31 crc kubenswrapper[4774]: I1121 15:50:31.912414 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-dns-svc\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:31 crc kubenswrapper[4774]: I1121 15:50:31.912451 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfwsp\" (UniqueName: \"kubernetes.io/projected/c334c682-df4a-4fc9-9672-651075da5c61-kube-api-access-sfwsp\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:31 crc kubenswrapper[4774]: I1121 15:50:31.912615 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-openstack-cell1\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:31 crc kubenswrapper[4774]: I1121 15:50:31.912763 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-ovsdbserver-nb\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:31 crc kubenswrapper[4774]: I1121 15:50:31.912837 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-config\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.014523 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-ovsdbserver-nb\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.014854 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-config\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.014956 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-ovsdbserver-sb\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.014981 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-dns-svc\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.015007 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfwsp\" (UniqueName: \"kubernetes.io/projected/c334c682-df4a-4fc9-9672-651075da5c61-kube-api-access-sfwsp\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.015053 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-openstack-cell1\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.015585 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-ovsdbserver-nb\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.015703 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-openstack-cell1\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.016242 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-dns-svc\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.016301 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-ovsdbserver-sb\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.016546 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c334c682-df4a-4fc9-9672-651075da5c61-config\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.054395 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfwsp\" (UniqueName: \"kubernetes.io/projected/c334c682-df4a-4fc9-9672-651075da5c61-kube-api-access-sfwsp\") pod \"dnsmasq-dns-7fb67cb889-62d6d\" (UID: \"c334c682-df4a-4fc9-9672-651075da5c61\") " pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.108935 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fb67cb889-62d6d"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.433342 4774 generic.go:334] "Generic (PLEG): container finished" podID="679926e0-e43b-4657-b63d-4ff6af9ed155" containerID="a4407449da6b487a8d8d14f62cd079f7e36906f3c8f6e840bf5dbf748acae367" exitCode=0
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.433800 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" event={"ID":"679926e0-e43b-4657-b63d-4ff6af9ed155","Type":"ContainerDied","Data":"a4407449da6b487a8d8d14f62cd079f7e36906f3c8f6e840bf5dbf748acae367"}
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.433871 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" event={"ID":"679926e0-e43b-4657-b63d-4ff6af9ed155","Type":"ContainerDied","Data":"b5e88413aafbb9d664749e5f43a3287c3e1852cd35c2f8bff661613f49dfb4d3"}
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.433889 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5e88413aafbb9d664749e5f43a3287c3e1852cd35c2f8bff661613f49dfb4d3"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.454474 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf"
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.535988 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w46tt\" (UniqueName: \"kubernetes.io/projected/679926e0-e43b-4657-b63d-4ff6af9ed155-kube-api-access-w46tt\") pod \"679926e0-e43b-4657-b63d-4ff6af9ed155\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") "
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.536193 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-ovsdbserver-sb\") pod \"679926e0-e43b-4657-b63d-4ff6af9ed155\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") "
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.536234 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-config\") pod \"679926e0-e43b-4657-b63d-4ff6af9ed155\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") "
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.536252 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-dns-svc\") pod \"679926e0-e43b-4657-b63d-4ff6af9ed155\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") "
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.536286 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-ovsdbserver-nb\") pod \"679926e0-e43b-4657-b63d-4ff6af9ed155\" (UID: \"679926e0-e43b-4657-b63d-4ff6af9ed155\") "
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.541041 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/679926e0-e43b-4657-b63d-4ff6af9ed155-kube-api-access-w46tt" (OuterVolumeSpecName: "kube-api-access-w46tt") pod "679926e0-e43b-4657-b63d-4ff6af9ed155" (UID: "679926e0-e43b-4657-b63d-4ff6af9ed155"). InnerVolumeSpecName "kube-api-access-w46tt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.607705 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "679926e0-e43b-4657-b63d-4ff6af9ed155" (UID: "679926e0-e43b-4657-b63d-4ff6af9ed155"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.634132 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "679926e0-e43b-4657-b63d-4ff6af9ed155" (UID: "679926e0-e43b-4657-b63d-4ff6af9ed155"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.635374 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "679926e0-e43b-4657-b63d-4ff6af9ed155" (UID: "679926e0-e43b-4657-b63d-4ff6af9ed155"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.639220 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.639269 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-dns-svc\") on node \"crc\" DevicePath \"\""
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.639280 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.639292 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w46tt\" (UniqueName: \"kubernetes.io/projected/679926e0-e43b-4657-b63d-4ff6af9ed155-kube-api-access-w46tt\") on node \"crc\" DevicePath \"\""
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.641245 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-config" (OuterVolumeSpecName: "config") pod "679926e0-e43b-4657-b63d-4ff6af9ed155" (UID: "679926e0-e43b-4657-b63d-4ff6af9ed155"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.741969 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/679926e0-e43b-4657-b63d-4ff6af9ed155-config\") on node \"crc\" DevicePath \"\""
Nov 21 15:50:32 crc kubenswrapper[4774]: I1121 15:50:32.756076 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fb67cb889-62d6d"]
Nov 21 15:50:32 crc kubenswrapper[4774]: W1121 15:50:32.770107 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc334c682_df4a_4fc9_9672_651075da5c61.slice/crio-facfef014d526248186ea6158cc3a8dbf91d9fe46e94dc4acefd0d501630e7e2 WatchSource:0}: Error finding container facfef014d526248186ea6158cc3a8dbf91d9fe46e94dc4acefd0d501630e7e2: Status 404 returned error can't find the container with id facfef014d526248186ea6158cc3a8dbf91d9fe46e94dc4acefd0d501630e7e2
Nov 21 15:50:33 crc kubenswrapper[4774]: I1121 15:50:33.447814 4774 generic.go:334] "Generic (PLEG): container finished" podID="c334c682-df4a-4fc9-9672-651075da5c61" containerID="6f375a8ec299af5d770de9d5763ff9f29d01e5c90da05442cc1418873318f923" exitCode=0
Need to start a new one" pod="openstack/dnsmasq-dns-7ffc4959f5-pb5rf" Nov 21 15:50:33 crc kubenswrapper[4774]: I1121 15:50:33.447935 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb67cb889-62d6d" event={"ID":"c334c682-df4a-4fc9-9672-651075da5c61","Type":"ContainerDied","Data":"6f375a8ec299af5d770de9d5763ff9f29d01e5c90da05442cc1418873318f923"} Nov 21 15:50:33 crc kubenswrapper[4774]: I1121 15:50:33.448542 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb67cb889-62d6d" event={"ID":"c334c682-df4a-4fc9-9672-651075da5c61","Type":"ContainerStarted","Data":"facfef014d526248186ea6158cc3a8dbf91d9fe46e94dc4acefd0d501630e7e2"} Nov 21 15:50:33 crc kubenswrapper[4774]: I1121 15:50:33.548315 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7ffc4959f5-pb5rf"] Nov 21 15:50:33 crc kubenswrapper[4774]: I1121 15:50:33.558282 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7ffc4959f5-pb5rf"] Nov 21 15:50:34 crc kubenswrapper[4774]: I1121 15:50:34.107133 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="679926e0-e43b-4657-b63d-4ff6af9ed155" path="/var/lib/kubelet/pods/679926e0-e43b-4657-b63d-4ff6af9ed155/volumes" Nov 21 15:50:34 crc kubenswrapper[4774]: I1121 15:50:34.461611 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fb67cb889-62d6d" event={"ID":"c334c682-df4a-4fc9-9672-651075da5c61","Type":"ContainerStarted","Data":"f44ce6855b0aedef0738d10074592ab7a478cd137bb8dcff977b5806f5d04dff"} Nov 21 15:50:34 crc kubenswrapper[4774]: I1121 15:50:34.461753 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fb67cb889-62d6d" Nov 21 15:50:34 crc kubenswrapper[4774]: I1121 15:50:34.486407 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fb67cb889-62d6d" podStartSLOduration=3.486390143 podStartE2EDuration="3.486390143s" podCreationTimestamp="2025-11-21 15:50:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 15:50:34.476282115 +0000 UTC m=+6425.128481374" watchObservedRunningTime="2025-11-21 15:50:34.486390143 +0000 UTC m=+6425.138589402" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.112164 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fb67cb889-62d6d" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.176123 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dc8bc85cc-8g2nb"] Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.176408 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" podUID="fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" containerName="dnsmasq-dns" containerID="cri-o://57a43c5933a3772f2f642fc5924334911a278098d25159c483c79a52c12a0ef5" gracePeriod=10 Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.556640 4774 generic.go:334] "Generic (PLEG): container finished" podID="fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" containerID="57a43c5933a3772f2f642fc5924334911a278098d25159c483c79a52c12a0ef5" exitCode=0 Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.557008 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" 
event={"ID":"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec","Type":"ContainerDied","Data":"57a43c5933a3772f2f642fc5924334911a278098d25159c483c79a52c12a0ef5"} Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.704289 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.868882 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-dns-svc\") pod \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.868963 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-openstack-cell1\") pod \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.868986 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-ovsdbserver-sb\") pod \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.869046 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-config\") pod \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.869072 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrh42\" (UniqueName: \"kubernetes.io/projected/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-kube-api-access-jrh42\") pod \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.869104 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-ovsdbserver-nb\") pod \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\" (UID: \"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec\") " Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.876033 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-kube-api-access-jrh42" (OuterVolumeSpecName: "kube-api-access-jrh42") pod "fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" (UID: "fd65e6dc-9cad-4cff-9510-2d9b2faf5bec"). InnerVolumeSpecName "kube-api-access-jrh42". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.939200 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-config" (OuterVolumeSpecName: "config") pod "fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" (UID: "fd65e6dc-9cad-4cff-9510-2d9b2faf5bec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.940578 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-openstack-cell1" (OuterVolumeSpecName: "openstack-cell1") pod "fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" (UID: "fd65e6dc-9cad-4cff-9510-2d9b2faf5bec"). InnerVolumeSpecName "openstack-cell1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.943331 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" (UID: "fd65e6dc-9cad-4cff-9510-2d9b2faf5bec"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.943566 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" (UID: "fd65e6dc-9cad-4cff-9510-2d9b2faf5bec"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.949737 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" (UID: "fd65e6dc-9cad-4cff-9510-2d9b2faf5bec"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.971833 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrh42\" (UniqueName: \"kubernetes.io/projected/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-kube-api-access-jrh42\") on node \"crc\" DevicePath \"\"" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.971889 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.971903 4774 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-dns-svc\") on node \"crc\" DevicePath \"\"" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.971912 4774 reconciler_common.go:293] "Volume detached for volume \"openstack-cell1\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-openstack-cell1\") on node \"crc\" DevicePath \"\"" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.971919 4774 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Nov 21 15:50:42 crc kubenswrapper[4774]: I1121 15:50:42.971928 4774 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec-config\") on node \"crc\" DevicePath \"\"" Nov 21 15:50:43 crc kubenswrapper[4774]: I1121 15:50:43.571126 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" event={"ID":"fd65e6dc-9cad-4cff-9510-2d9b2faf5bec","Type":"ContainerDied","Data":"65e9a69ca86c21b0e7b87f9d9086b0e8551856f22b60ac621a1e8e1e61f83843"} Nov 21 15:50:43 crc kubenswrapper[4774]: I1121 15:50:43.571189 4774 scope.go:117] "RemoveContainer" containerID="57a43c5933a3772f2f642fc5924334911a278098d25159c483c79a52c12a0ef5" Nov 21 15:50:43 crc kubenswrapper[4774]: I1121 15:50:43.571195 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dc8bc85cc-8g2nb" Nov 21 15:50:43 crc kubenswrapper[4774]: I1121 15:50:43.608115 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dc8bc85cc-8g2nb"] Nov 21 15:50:43 crc kubenswrapper[4774]: I1121 15:50:43.612153 4774 scope.go:117] "RemoveContainer" containerID="01c115281f1f13197b1ea31326f15e77050df356d19c4c4d0017132236b50d8d" Nov 21 15:50:43 crc kubenswrapper[4774]: I1121 15:50:43.618176 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-dc8bc85cc-8g2nb"] Nov 21 15:50:44 crc kubenswrapper[4774]: I1121 15:50:44.104580 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" path="/var/lib/kubelet/pods/fd65e6dc-9cad-4cff-9510-2d9b2faf5bec/volumes" Nov 21 15:50:49 crc kubenswrapper[4774]: I1121 15:50:49.047993 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-create-9cpf9"] Nov 21 15:50:49 crc kubenswrapper[4774]: I1121 15:50:49.058118 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-create-9cpf9"] Nov 21 15:50:50 crc kubenswrapper[4774]: I1121 15:50:50.114969 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87c9a523-0280-46a5-98da-f9de20a6fbd1" path="/var/lib/kubelet/pods/87c9a523-0280-46a5-98da-f9de20a6fbd1/volumes" Nov 21 15:50:51 crc kubenswrapper[4774]: I1121 15:50:51.038924 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-666b-account-create-4zsxl"] Nov 21 15:50:51 crc kubenswrapper[4774]: I1121 15:50:51.055172 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-666b-account-create-4zsxl"] Nov 21 15:50:51 crc kubenswrapper[4774]: I1121 15:50:51.333201 4774 scope.go:117] "RemoveContainer" containerID="7a79cf03168020ffbb4bd320527ba574d937e4645bb7ef5d973921a0c49b91b1" Nov 21 15:50:52 crc kubenswrapper[4774]: I1121 15:50:52.107057 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1c02231-5d57-41bf-927b-e4443ef9bc99" path="/var/lib/kubelet/pods/a1c02231-5d57-41bf-927b-e4443ef9bc99/volumes" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.008236 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j"] Nov 21 15:50:53 crc kubenswrapper[4774]: E1121 15:50:53.008906 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="679926e0-e43b-4657-b63d-4ff6af9ed155" containerName="dnsmasq-dns" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.008918 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="679926e0-e43b-4657-b63d-4ff6af9ed155" containerName="dnsmasq-dns" Nov 21 15:50:53 crc kubenswrapper[4774]: E1121 15:50:53.008937 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" containerName="init" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.008943 4774 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" containerName="init" Nov 21 15:50:53 crc kubenswrapper[4774]: E1121 15:50:53.008959 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="679926e0-e43b-4657-b63d-4ff6af9ed155" containerName="init" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.008965 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="679926e0-e43b-4657-b63d-4ff6af9ed155" containerName="init" Nov 21 15:50:53 crc kubenswrapper[4774]: E1121 15:50:53.008979 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" containerName="dnsmasq-dns" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.008984 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" containerName="dnsmasq-dns" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.009177 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd65e6dc-9cad-4cff-9510-2d9b2faf5bec" containerName="dnsmasq-dns" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.009203 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="679926e0-e43b-4657-b63d-4ff6af9ed155" containerName="dnsmasq-dns" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.009910 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.012312 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.012386 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.012501 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.013247 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.034083 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j"] Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.195429 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.195613 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.195748 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-ceph\") pod 
\"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.195804 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7phnt\" (UniqueName: \"kubernetes.io/projected/3eaa2587-5f2d-4df9-a322-3261da7ca988-kube-api-access-7phnt\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.195869 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.298363 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.298460 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.298507 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7phnt\" (UniqueName: \"kubernetes.io/projected/3eaa2587-5f2d-4df9-a322-3261da7ca988-kube-api-access-7phnt\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.298540 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.298641 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc 
kubenswrapper[4774]: I1121 15:50:53.304523 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-ceph\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.305339 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-ssh-key\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.311917 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-inventory\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.316207 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-pre-adoption-validation-combined-ca-bundle\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.317458 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7phnt\" (UniqueName: \"kubernetes.io/projected/3eaa2587-5f2d-4df9-a322-3261da7ca988-kube-api-access-7phnt\") pod \"pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:53 crc kubenswrapper[4774]: I1121 15:50:53.373843 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:50:54 crc kubenswrapper[4774]: I1121 15:50:54.029426 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 15:50:54 crc kubenswrapper[4774]: I1121 15:50:54.032783 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j"] Nov 21 15:50:54 crc kubenswrapper[4774]: I1121 15:50:54.687037 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" event={"ID":"3eaa2587-5f2d-4df9-a322-3261da7ca988","Type":"ContainerStarted","Data":"ec864f1e152688def9758f82f0497a16225a2c53cad8445ac6844da8e9edb49d"} Nov 21 15:50:57 crc kubenswrapper[4774]: I1121 15:50:57.036393 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-persistence-db-create-clqtq"] Nov 21 15:50:57 crc kubenswrapper[4774]: I1121 15:50:57.048457 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-persistence-db-create-clqtq"] Nov 21 15:50:58 crc kubenswrapper[4774]: I1121 15:50:58.031747 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-8b7b-account-create-pmwb2"] Nov 21 15:50:58 crc kubenswrapper[4774]: I1121 15:50:58.045133 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-8b7b-account-create-pmwb2"] Nov 21 15:50:58 crc kubenswrapper[4774]: I1121 15:50:58.111604 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6f9b0d7-bd7c-4b92-8b65-f8c063e09197" path="/var/lib/kubelet/pods/b6f9b0d7-bd7c-4b92-8b65-f8c063e09197/volumes" Nov 21 15:50:58 crc kubenswrapper[4774]: I1121 15:50:58.113664 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f37cd5cb-111d-4928-9348-d029977285e6" path="/var/lib/kubelet/pods/f37cd5cb-111d-4928-9348-d029977285e6/volumes" Nov 21 15:51:09 crc kubenswrapper[4774]: E1121 15:51:09.933593 4774 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest" Nov 21 15:51:09 crc kubenswrapper[4774]: E1121 15:51:09.934234 4774 kuberuntime_manager.go:1274] "Unhandled Error" err=< Nov 21 15:51:09 crc kubenswrapper[4774]: container &Container{Name:pre-adoption-validation-openstack-pre-adoption-openstack-cell1,Image:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,Command:[],Args:[ansible-runner run /runner -p osp.edpm.pre_adoption_validation -i pre-adoption-validation-openstack-pre-adoption-openstack-cell1],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:ANSIBLE_CALLBACKS_ENABLED,Value:ansible.posix.profile_tasks,ValueFrom:nil,},EnvVar{Name:ANSIBLE_CALLBACK_RESULT_FORMAT,Value:yaml,ValueFrom:nil,},EnvVar{Name:ANSIBLE_FORCE_COLOR,Value:True,ValueFrom:nil,},EnvVar{Name:ANSIBLE_DISPLAY_ARGS_TO_STDOUT,Value:True,ValueFrom:nil,},EnvVar{Name:ANSIBLE_SSH_ARGS,Value:-C -o ControlMaster=auto -o ControlPersist=80s,ValueFrom:nil,},EnvVar{Name:ANSIBLE_VERBOSITY,Value:1,ValueFrom:nil,},EnvVar{Name:RUNNER_PLAYBOOK,Value: Nov 21 15:51:09 crc kubenswrapper[4774]: osp.edpm.pre_adoption_validation Nov 21 15:51:09 crc kubenswrapper[4774]: Nov 21 15:51:09 crc kubenswrapper[4774]: ,ValueFrom:nil,},EnvVar{Name:RUNNER_EXTRA_VARS,Value: Nov 21 15:51:09 crc kubenswrapper[4774]: edpm_override_hosts: openstack-cell1 Nov 21 
15:51:09 crc kubenswrapper[4774]: edpm_service_type: pre-adoption-validation Nov 21 15:51:09 crc kubenswrapper[4774]: edpm_services_override: [pre-adoption-validation] Nov 21 15:51:09 crc kubenswrapper[4774]: Nov 21 15:51:09 crc kubenswrapper[4774]: Nov 21 15:51:09 crc kubenswrapper[4774]: ,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:ceph,ReadOnly:true,MountPath:/etc/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:pre-adoption-validation-combined-ca-bundle,ReadOnly:false,MountPath:/var/lib/openstack/cacerts/pre-adoption-validation,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/runner/env/ssh_key,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:inventory,ReadOnly:false,MountPath:/runner/inventory/hosts,SubPath:inventory,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7phnt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:openstack-aee-default-env,},Optional:*true,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j_openstack(3eaa2587-5f2d-4df9-a322-3261da7ca988): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled Nov 21 15:51:09 crc kubenswrapper[4774]: > logger="UnhandledError" Nov 21 15:51:09 crc kubenswrapper[4774]: E1121 15:51:09.935425 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pre-adoption-validation-openstack-pre-adoption-openstack-cell1\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" podUID="3eaa2587-5f2d-4df9-a322-3261da7ca988" Nov 21 15:51:10 crc kubenswrapper[4774]: E1121 15:51:10.873273 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"pre-adoption-validation-openstack-pre-adoption-openstack-cell1\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest\\\"\"" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" podUID="3eaa2587-5f2d-4df9-a322-3261da7ca988" Nov 21 15:51:26 crc kubenswrapper[4774]: I1121 15:51:26.086551 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" 
event={"ID":"3eaa2587-5f2d-4df9-a322-3261da7ca988","Type":"ContainerStarted","Data":"9b4ddacb817474200c39fbb398a619ab504149b7e7a99ae0514af09cef226333"} Nov 21 15:51:26 crc kubenswrapper[4774]: I1121 15:51:26.109504 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" podStartSLOduration=3.419186756 podStartE2EDuration="34.109486025s" podCreationTimestamp="2025-11-21 15:50:52 +0000 UTC" firstStartedPulling="2025-11-21 15:50:54.029106823 +0000 UTC m=+6444.681306082" lastFinishedPulling="2025-11-21 15:51:24.719406082 +0000 UTC m=+6475.371605351" observedRunningTime="2025-11-21 15:51:26.105621174 +0000 UTC m=+6476.757820433" watchObservedRunningTime="2025-11-21 15:51:26.109486025 +0000 UTC m=+6476.761685304" Nov 21 15:51:39 crc kubenswrapper[4774]: I1121 15:51:39.239305 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eaa2587-5f2d-4df9-a322-3261da7ca988" containerID="9b4ddacb817474200c39fbb398a619ab504149b7e7a99ae0514af09cef226333" exitCode=0 Nov 21 15:51:39 crc kubenswrapper[4774]: I1121 15:51:39.239443 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" event={"ID":"3eaa2587-5f2d-4df9-a322-3261da7ca988","Type":"ContainerDied","Data":"9b4ddacb817474200c39fbb398a619ab504149b7e7a99ae0514af09cef226333"} Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.784017 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.884466 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7phnt\" (UniqueName: \"kubernetes.io/projected/3eaa2587-5f2d-4df9-a322-3261da7ca988-kube-api-access-7phnt\") pod \"3eaa2587-5f2d-4df9-a322-3261da7ca988\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.884569 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-inventory\") pod \"3eaa2587-5f2d-4df9-a322-3261da7ca988\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.884609 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-pre-adoption-validation-combined-ca-bundle\") pod \"3eaa2587-5f2d-4df9-a322-3261da7ca988\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.884766 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-ceph\") pod \"3eaa2587-5f2d-4df9-a322-3261da7ca988\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.884889 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-ssh-key\") pod \"3eaa2587-5f2d-4df9-a322-3261da7ca988\" (UID: \"3eaa2587-5f2d-4df9-a322-3261da7ca988\") " Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.890620 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/projected/3eaa2587-5f2d-4df9-a322-3261da7ca988-kube-api-access-7phnt" (OuterVolumeSpecName: "kube-api-access-7phnt") pod "3eaa2587-5f2d-4df9-a322-3261da7ca988" (UID: "3eaa2587-5f2d-4df9-a322-3261da7ca988"). InnerVolumeSpecName "kube-api-access-7phnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.892303 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-pre-adoption-validation-combined-ca-bundle" (OuterVolumeSpecName: "pre-adoption-validation-combined-ca-bundle") pod "3eaa2587-5f2d-4df9-a322-3261da7ca988" (UID: "3eaa2587-5f2d-4df9-a322-3261da7ca988"). InnerVolumeSpecName "pre-adoption-validation-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.893668 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-ceph" (OuterVolumeSpecName: "ceph") pod "3eaa2587-5f2d-4df9-a322-3261da7ca988" (UID: "3eaa2587-5f2d-4df9-a322-3261da7ca988"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.921019 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3eaa2587-5f2d-4df9-a322-3261da7ca988" (UID: "3eaa2587-5f2d-4df9-a322-3261da7ca988"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.928522 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-inventory" (OuterVolumeSpecName: "inventory") pod "3eaa2587-5f2d-4df9-a322-3261da7ca988" (UID: "3eaa2587-5f2d-4df9-a322-3261da7ca988"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.988797 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7phnt\" (UniqueName: \"kubernetes.io/projected/3eaa2587-5f2d-4df9-a322-3261da7ca988-kube-api-access-7phnt\") on node \"crc\" DevicePath \"\"" Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.988871 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.988885 4774 reconciler_common.go:293] "Volume detached for volume \"pre-adoption-validation-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-pre-adoption-validation-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.988929 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 15:51:40 crc kubenswrapper[4774]: I1121 15:51:40.988946 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3eaa2587-5f2d-4df9-a322-3261da7ca988-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 15:51:41 crc kubenswrapper[4774]: I1121 15:51:41.267993 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" event={"ID":"3eaa2587-5f2d-4df9-a322-3261da7ca988","Type":"ContainerDied","Data":"ec864f1e152688def9758f82f0497a16225a2c53cad8445ac6844da8e9edb49d"} Nov 21 15:51:41 crc kubenswrapper[4774]: I1121 15:51:41.268050 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec864f1e152688def9758f82f0497a16225a2c53cad8445ac6844da8e9edb49d" Nov 21 15:51:41 crc kubenswrapper[4774]: I1121 15:51:41.268121 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j" Nov 21 15:51:45 crc kubenswrapper[4774]: I1121 15:51:45.046348 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/octavia-db-sync-79fml"] Nov 21 15:51:45 crc kubenswrapper[4774]: I1121 15:51:45.062128 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/octavia-db-sync-79fml"] Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.105644 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54619bd5-89f7-432a-8ad7-38574bfe4a1e" path="/var/lib/kubelet/pods/54619bd5-89f7-432a-8ad7-38574bfe4a1e/volumes" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.166366 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx"] Nov 21 15:51:46 crc kubenswrapper[4774]: E1121 15:51:46.166983 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3eaa2587-5f2d-4df9-a322-3261da7ca988" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.167004 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="3eaa2587-5f2d-4df9-a322-3261da7ca988" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.167251 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="3eaa2587-5f2d-4df9-a322-3261da7ca988" containerName="pre-adoption-validation-openstack-pre-adoption-openstack-cell1" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.168223 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.172493 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.173590 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.173756 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.173945 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.186798 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx"] Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.316696 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.316800 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " 
pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.317606 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.317735 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.318231 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbn6f\" (UniqueName: \"kubernetes.io/projected/90a66815-e9c7-4b6e-869e-661af63e3e00-kube-api-access-bbn6f\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.419864 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.419928 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.420031 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbn6f\" (UniqueName: \"kubernetes.io/projected/90a66815-e9c7-4b6e-869e-661af63e3e00-kube-api-access-bbn6f\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.420061 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.420114 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" 
(UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.425722 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-ssh-key\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.425775 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-tripleo-cleanup-combined-ca-bundle\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.426291 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-inventory\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.427100 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-ceph\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.442998 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbn6f\" (UniqueName: \"kubernetes.io/projected/90a66815-e9c7-4b6e-869e-661af63e3e00-kube-api-access-bbn6f\") pod \"tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:46 crc kubenswrapper[4774]: I1121 15:51:46.506112 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 15:51:47 crc kubenswrapper[4774]: I1121 15:51:47.108203 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx"] Nov 21 15:51:47 crc kubenswrapper[4774]: I1121 15:51:47.339219 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" event={"ID":"90a66815-e9c7-4b6e-869e-661af63e3e00","Type":"ContainerStarted","Data":"abb08f798ebb511da54f1a3a0dbb09c829c7052b10e9b1ac2be2b34bdb9280cc"} Nov 21 15:51:48 crc kubenswrapper[4774]: I1121 15:51:48.352079 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" event={"ID":"90a66815-e9c7-4b6e-869e-661af63e3e00","Type":"ContainerStarted","Data":"a2c96cf415fc18ecdde0f9a510fe68caef01ba9bfa36d55d84ab381e2282a71f"} Nov 21 15:51:48 crc kubenswrapper[4774]: I1121 15:51:48.379082 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" podStartSLOduration=1.989982133 podStartE2EDuration="2.37906302s" podCreationTimestamp="2025-11-21 15:51:46 +0000 UTC" firstStartedPulling="2025-11-21 15:51:47.117601198 +0000 UTC m=+6497.769800457" lastFinishedPulling="2025-11-21 15:51:47.506682085 +0000 UTC m=+6498.158881344" observedRunningTime="2025-11-21 15:51:48.369393954 +0000 UTC m=+6499.021593213" watchObservedRunningTime="2025-11-21 15:51:48.37906302 +0000 UTC m=+6499.031262279" Nov 21 15:51:51 crc kubenswrapper[4774]: I1121 15:51:51.441475 4774 scope.go:117] "RemoveContainer" containerID="0a5e279aefefc9363d161fa1b6db5f20a23dfaf7e2fb0ccba5539c7d70c056b3" Nov 21 15:51:51 crc kubenswrapper[4774]: I1121 15:51:51.462723 4774 scope.go:117] "RemoveContainer" containerID="62c95dd6def82acb076fbf5bfa8a700a9e11539a07617b496f40292f855c385d" Nov 21 15:51:51 crc kubenswrapper[4774]: I1121 15:51:51.538737 4774 scope.go:117] "RemoveContainer" containerID="b9928033a3d501999626c0f84cedf85c94e2622480d8a399e5039c1dc827c3d4" Nov 21 15:51:51 crc kubenswrapper[4774]: I1121 15:51:51.597257 4774 scope.go:117] "RemoveContainer" containerID="a8ab46e05a1400da72c6dbb0573223d045f8896d56efd66c53c8b57fda3a54ed" Nov 21 15:51:51 crc kubenswrapper[4774]: I1121 15:51:51.673305 4774 scope.go:117] "RemoveContainer" containerID="e6ccb283ce0bea692497d58ef4daece6336c54f35f70e7c256e48a843da117cf" Nov 21 15:51:59 crc kubenswrapper[4774]: I1121 15:51:59.601243 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:51:59 crc kubenswrapper[4774]: I1121 15:51:59.601912 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:52:29 crc kubenswrapper[4774]: I1121 15:52:29.601589 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:52:29 crc kubenswrapper[4774]: I1121 15:52:29.602389 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.107182 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dm6vl"] Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.110516 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dm6vl"] Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.111583 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.150159 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-utilities\") pod \"redhat-operators-dm6vl\" (UID: \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\") " pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.151008 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-catalog-content\") pod \"redhat-operators-dm6vl\" (UID: \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\") " pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.151308 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjmhw\" (UniqueName: \"kubernetes.io/projected/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-kube-api-access-jjmhw\") pod \"redhat-operators-dm6vl\" (UID: \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\") " pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.253549 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-catalog-content\") pod \"redhat-operators-dm6vl\" (UID: \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\") " pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.253705 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjmhw\" (UniqueName: \"kubernetes.io/projected/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-kube-api-access-jjmhw\") pod \"redhat-operators-dm6vl\" (UID: \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\") " pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.253738 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-utilities\") pod \"redhat-operators-dm6vl\" (UID: \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\") " pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.254188 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-catalog-content\") pod \"redhat-operators-dm6vl\" (UID: \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\") " pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.254242 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-utilities\") pod \"redhat-operators-dm6vl\" (UID: \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\") " pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.273032 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjmhw\" (UniqueName: \"kubernetes.io/projected/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-kube-api-access-jjmhw\") pod \"redhat-operators-dm6vl\" (UID: \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\") " pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.442249 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:52:42 crc kubenswrapper[4774]: W1121 15:52:42.956676 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2c24e40b_9885_48db_bbc3_bb5ddb5d1b39.slice/crio-b2311a5510b5e091d9b617ad3c05ee46c5f71053468ecb0cac18ec5ae94faab5 WatchSource:0}: Error finding container b2311a5510b5e091d9b617ad3c05ee46c5f71053468ecb0cac18ec5ae94faab5: Status 404 returned error can't find the container with id b2311a5510b5e091d9b617ad3c05ee46c5f71053468ecb0cac18ec5ae94faab5 Nov 21 15:52:42 crc kubenswrapper[4774]: I1121 15:52:42.980633 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dm6vl"] Nov 21 15:52:43 crc kubenswrapper[4774]: I1121 15:52:43.925941 4774 generic.go:334] "Generic (PLEG): container finished" podID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerID="836080ae470d49c5a452c2e26cd6506a6a2810cc90949b12cfdd1022d9c1e98c" exitCode=0 Nov 21 15:52:43 crc kubenswrapper[4774]: I1121 15:52:43.926730 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm6vl" event={"ID":"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39","Type":"ContainerDied","Data":"836080ae470d49c5a452c2e26cd6506a6a2810cc90949b12cfdd1022d9c1e98c"} Nov 21 15:52:43 crc kubenswrapper[4774]: I1121 15:52:43.926783 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm6vl" event={"ID":"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39","Type":"ContainerStarted","Data":"b2311a5510b5e091d9b617ad3c05ee46c5f71053468ecb0cac18ec5ae94faab5"} Nov 21 15:52:44 crc kubenswrapper[4774]: I1121 15:52:44.938069 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm6vl" event={"ID":"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39","Type":"ContainerStarted","Data":"d227a7e7af1fb635ef3e3b7ac68bcf5d72bda1241099480ef0b72da17f82ab5d"} Nov 21 15:52:52 crc kubenswrapper[4774]: I1121 15:52:52.014778 4774 generic.go:334] "Generic (PLEG): container finished" podID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerID="d227a7e7af1fb635ef3e3b7ac68bcf5d72bda1241099480ef0b72da17f82ab5d" exitCode=0 Nov 21 15:52:52 crc kubenswrapper[4774]: I1121 15:52:52.014886 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm6vl" 
event={"ID":"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39","Type":"ContainerDied","Data":"d227a7e7af1fb635ef3e3b7ac68bcf5d72bda1241099480ef0b72da17f82ab5d"} Nov 21 15:52:54 crc kubenswrapper[4774]: I1121 15:52:54.047461 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm6vl" event={"ID":"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39","Type":"ContainerStarted","Data":"d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3"} Nov 21 15:52:54 crc kubenswrapper[4774]: I1121 15:52:54.078249 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dm6vl" podStartSLOduration=2.564502596 podStartE2EDuration="12.078231091s" podCreationTimestamp="2025-11-21 15:52:42 +0000 UTC" firstStartedPulling="2025-11-21 15:52:43.930239889 +0000 UTC m=+6554.582439178" lastFinishedPulling="2025-11-21 15:52:53.443968414 +0000 UTC m=+6564.096167673" observedRunningTime="2025-11-21 15:52:54.068214835 +0000 UTC m=+6564.720414104" watchObservedRunningTime="2025-11-21 15:52:54.078231091 +0000 UTC m=+6564.730430350" Nov 21 15:52:59 crc kubenswrapper[4774]: I1121 15:52:59.600793 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 15:52:59 crc kubenswrapper[4774]: I1121 15:52:59.601379 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 15:52:59 crc kubenswrapper[4774]: I1121 15:52:59.601435 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 15:52:59 crc kubenswrapper[4774]: I1121 15:52:59.602391 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 15:52:59 crc kubenswrapper[4774]: I1121 15:52:59.602446 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" gracePeriod=600 Nov 21 15:52:59 crc kubenswrapper[4774]: E1121 15:52:59.764630 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:53:00 crc kubenswrapper[4774]: I1121 15:53:00.136049 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" 
containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" exitCode=0 Nov 21 15:53:00 crc kubenswrapper[4774]: I1121 15:53:00.136103 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268"} Nov 21 15:53:00 crc kubenswrapper[4774]: I1121 15:53:00.136146 4774 scope.go:117] "RemoveContainer" containerID="af288035ac19f9f50a6dc5ab6216f56c123497fcf2d36d36aec3fddf5ed00acc" Nov 21 15:53:00 crc kubenswrapper[4774]: I1121 15:53:00.136941 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:53:00 crc kubenswrapper[4774]: E1121 15:53:00.137242 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:53:02 crc kubenswrapper[4774]: I1121 15:53:02.442662 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:53:02 crc kubenswrapper[4774]: I1121 15:53:02.443221 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:53:03 crc kubenswrapper[4774]: I1121 15:53:03.500686 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dm6vl" podUID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerName="registry-server" probeResult="failure" output=< Nov 21 15:53:03 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 15:53:03 crc kubenswrapper[4774]: > Nov 21 15:53:13 crc kubenswrapper[4774]: I1121 15:53:13.093020 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:53:13 crc kubenswrapper[4774]: E1121 15:53:13.093799 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:53:13 crc kubenswrapper[4774]: I1121 15:53:13.489757 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dm6vl" podUID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerName="registry-server" probeResult="failure" output=< Nov 21 15:53:13 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 15:53:13 crc kubenswrapper[4774]: > Nov 21 15:53:23 crc kubenswrapper[4774]: I1121 15:53:23.488333 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dm6vl" podUID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerName="registry-server" probeResult="failure" output=< Nov 21 15:53:23 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 15:53:23 crc 
kubenswrapper[4774]: > Nov 21 15:53:28 crc kubenswrapper[4774]: I1121 15:53:28.093259 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:53:28 crc kubenswrapper[4774]: E1121 15:53:28.094173 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:53:32 crc kubenswrapper[4774]: I1121 15:53:32.495060 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:53:32 crc kubenswrapper[4774]: I1121 15:53:32.548167 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:53:32 crc kubenswrapper[4774]: I1121 15:53:32.731334 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dm6vl"] Nov 21 15:53:34 crc kubenswrapper[4774]: I1121 15:53:34.471377 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dm6vl" podUID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerName="registry-server" containerID="cri-o://d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3" gracePeriod=2 Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.117178 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.240859 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-utilities\") pod \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\" (UID: \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\") " Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.240965 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjmhw\" (UniqueName: \"kubernetes.io/projected/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-kube-api-access-jjmhw\") pod \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\" (UID: \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\") " Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.241077 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-catalog-content\") pod \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\" (UID: \"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39\") " Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.241788 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-utilities" (OuterVolumeSpecName: "utilities") pod "2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" (UID: "2c24e40b-9885-48db-bbc3-bb5ddb5d1b39"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.248039 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-kube-api-access-jjmhw" (OuterVolumeSpecName: "kube-api-access-jjmhw") pod "2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" (UID: "2c24e40b-9885-48db-bbc3-bb5ddb5d1b39"). InnerVolumeSpecName "kube-api-access-jjmhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.339847 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" (UID: "2c24e40b-9885-48db-bbc3-bb5ddb5d1b39"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.343843 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.343891 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjmhw\" (UniqueName: \"kubernetes.io/projected/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-kube-api-access-jjmhw\") on node \"crc\" DevicePath \"\"" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.343907 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.483730 4774 generic.go:334] "Generic (PLEG): container finished" podID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerID="d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3" exitCode=0 Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.483845 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm6vl" event={"ID":"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39","Type":"ContainerDied","Data":"d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3"} Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.483879 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dm6vl" event={"ID":"2c24e40b-9885-48db-bbc3-bb5ddb5d1b39","Type":"ContainerDied","Data":"b2311a5510b5e091d9b617ad3c05ee46c5f71053468ecb0cac18ec5ae94faab5"} Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.483900 4774 scope.go:117] "RemoveContainer" containerID="d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.484045 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dm6vl" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.518502 4774 scope.go:117] "RemoveContainer" containerID="d227a7e7af1fb635ef3e3b7ac68bcf5d72bda1241099480ef0b72da17f82ab5d" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.523267 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dm6vl"] Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.537623 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dm6vl"] Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.562648 4774 scope.go:117] "RemoveContainer" containerID="836080ae470d49c5a452c2e26cd6506a6a2810cc90949b12cfdd1022d9c1e98c" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.586757 4774 scope.go:117] "RemoveContainer" containerID="d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3" Nov 21 15:53:35 crc kubenswrapper[4774]: E1121 15:53:35.587463 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3\": container with ID starting with d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3 not found: ID does not exist" containerID="d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.587498 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3"} err="failed to get container status \"d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3\": rpc error: code = NotFound desc = could not find container \"d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3\": container with ID starting with d5fc72c6d81745bc92537d27fb06e9905b69bf00f86fb08245dcc8bca9d100f3 not found: ID does not exist" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.587521 4774 scope.go:117] "RemoveContainer" containerID="d227a7e7af1fb635ef3e3b7ac68bcf5d72bda1241099480ef0b72da17f82ab5d" Nov 21 15:53:35 crc kubenswrapper[4774]: E1121 15:53:35.587950 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d227a7e7af1fb635ef3e3b7ac68bcf5d72bda1241099480ef0b72da17f82ab5d\": container with ID starting with d227a7e7af1fb635ef3e3b7ac68bcf5d72bda1241099480ef0b72da17f82ab5d not found: ID does not exist" containerID="d227a7e7af1fb635ef3e3b7ac68bcf5d72bda1241099480ef0b72da17f82ab5d" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.587973 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d227a7e7af1fb635ef3e3b7ac68bcf5d72bda1241099480ef0b72da17f82ab5d"} err="failed to get container status \"d227a7e7af1fb635ef3e3b7ac68bcf5d72bda1241099480ef0b72da17f82ab5d\": rpc error: code = NotFound desc = could not find container \"d227a7e7af1fb635ef3e3b7ac68bcf5d72bda1241099480ef0b72da17f82ab5d\": container with ID starting with d227a7e7af1fb635ef3e3b7ac68bcf5d72bda1241099480ef0b72da17f82ab5d not found: ID does not exist" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.587987 4774 scope.go:117] "RemoveContainer" containerID="836080ae470d49c5a452c2e26cd6506a6a2810cc90949b12cfdd1022d9c1e98c" Nov 21 15:53:35 crc kubenswrapper[4774]: E1121 15:53:35.588810 4774 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"836080ae470d49c5a452c2e26cd6506a6a2810cc90949b12cfdd1022d9c1e98c\": container with ID starting with 836080ae470d49c5a452c2e26cd6506a6a2810cc90949b12cfdd1022d9c1e98c not found: ID does not exist" containerID="836080ae470d49c5a452c2e26cd6506a6a2810cc90949b12cfdd1022d9c1e98c" Nov 21 15:53:35 crc kubenswrapper[4774]: I1121 15:53:35.588852 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"836080ae470d49c5a452c2e26cd6506a6a2810cc90949b12cfdd1022d9c1e98c"} err="failed to get container status \"836080ae470d49c5a452c2e26cd6506a6a2810cc90949b12cfdd1022d9c1e98c\": rpc error: code = NotFound desc = could not find container \"836080ae470d49c5a452c2e26cd6506a6a2810cc90949b12cfdd1022d9c1e98c\": container with ID starting with 836080ae470d49c5a452c2e26cd6506a6a2810cc90949b12cfdd1022d9c1e98c not found: ID does not exist" Nov 21 15:53:36 crc kubenswrapper[4774]: I1121 15:53:36.109260 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" path="/var/lib/kubelet/pods/2c24e40b-9885-48db-bbc3-bb5ddb5d1b39/volumes" Nov 21 15:53:42 crc kubenswrapper[4774]: I1121 15:53:42.094645 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:53:42 crc kubenswrapper[4774]: E1121 15:53:42.096012 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:53:57 crc kubenswrapper[4774]: I1121 15:53:57.093136 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:53:57 crc kubenswrapper[4774]: E1121 15:53:57.093936 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:54:10 crc kubenswrapper[4774]: I1121 15:54:10.100260 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:54:10 crc kubenswrapper[4774]: E1121 15:54:10.101167 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:54:23 crc kubenswrapper[4774]: I1121 15:54:23.093703 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:54:23 crc kubenswrapper[4774]: E1121 15:54:23.094606 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:54:36 crc kubenswrapper[4774]: I1121 15:54:36.093670 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:54:36 crc kubenswrapper[4774]: E1121 15:54:36.094442 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:54:51 crc kubenswrapper[4774]: I1121 15:54:51.093706 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:54:51 crc kubenswrapper[4774]: E1121 15:54:51.094707 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:54:59 crc kubenswrapper[4774]: I1121 15:54:59.045491 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-create-5v749"] Nov 21 15:54:59 crc kubenswrapper[4774]: I1121 15:54:59.065665 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-a2aa-account-create-rplvw"] Nov 21 15:54:59 crc kubenswrapper[4774]: I1121 15:54:59.078465 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-create-5v749"] Nov 21 15:54:59 crc kubenswrapper[4774]: I1121 15:54:59.089672 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-a2aa-account-create-rplvw"] Nov 21 15:55:00 crc kubenswrapper[4774]: I1121 15:55:00.116734 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cec4856-7315-4613-a792-92590e41b9ee" path="/var/lib/kubelet/pods/1cec4856-7315-4613-a792-92590e41b9ee/volumes" Nov 21 15:55:00 crc kubenswrapper[4774]: I1121 15:55:00.119545 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42c4bad9-a76d-46e9-8933-ab0a80018ae8" path="/var/lib/kubelet/pods/42c4bad9-a76d-46e9-8933-ab0a80018ae8/volumes" Nov 21 15:55:04 crc kubenswrapper[4774]: I1121 15:55:04.200106 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:55:04 crc kubenswrapper[4774]: E1121 15:55:04.202236 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:55:13 crc kubenswrapper[4774]: I1121 
15:55:13.033370 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/heat-db-sync-2z6xm"] Nov 21 15:55:13 crc kubenswrapper[4774]: I1121 15:55:13.043031 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/heat-db-sync-2z6xm"] Nov 21 15:55:14 crc kubenswrapper[4774]: I1121 15:55:14.112278 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="103b1e47-f61b-47ee-8de4-a6a4f83cc316" path="/var/lib/kubelet/pods/103b1e47-f61b-47ee-8de4-a6a4f83cc316/volumes" Nov 21 15:55:17 crc kubenswrapper[4774]: I1121 15:55:17.093767 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:55:17 crc kubenswrapper[4774]: E1121 15:55:17.095128 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:55:31 crc kubenswrapper[4774]: I1121 15:55:31.093565 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:55:31 crc kubenswrapper[4774]: E1121 15:55:31.094327 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:55:46 crc kubenswrapper[4774]: I1121 15:55:46.093155 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:55:46 crc kubenswrapper[4774]: E1121 15:55:46.093761 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:55:51 crc kubenswrapper[4774]: I1121 15:55:51.880272 4774 scope.go:117] "RemoveContainer" containerID="85ebe1cd0516ff156cb98bd329ac1185deaf3319793a2c42cae450bb4bb3cc38" Nov 21 15:55:51 crc kubenswrapper[4774]: I1121 15:55:51.919242 4774 scope.go:117] "RemoveContainer" containerID="3d71a078c88580f3ab785fc4e1313f543222412c9d7ca4a0e51a38730d63d2ee" Nov 21 15:55:51 crc kubenswrapper[4774]: I1121 15:55:51.953882 4774 scope.go:117] "RemoveContainer" containerID="8675e79840e4507be66ff05a4e7fee318e98e14285f0d3a119e044593b4e4911" Nov 21 15:55:52 crc kubenswrapper[4774]: I1121 15:55:52.004651 4774 scope.go:117] "RemoveContainer" containerID="a4407449da6b487a8d8d14f62cd079f7e36906f3c8f6e840bf5dbf748acae367" Nov 21 15:55:52 crc kubenswrapper[4774]: I1121 15:55:52.037365 4774 scope.go:117] "RemoveContainer" containerID="4284d2cd568ab8bbda01fbbbfd23eeffbcacd6fe39275849d8f13444a8058555" Nov 21 15:55:57 crc kubenswrapper[4774]: I1121 15:55:57.093742 4774 scope.go:117] "RemoveContainer" 
containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:55:57 crc kubenswrapper[4774]: E1121 15:55:57.094631 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.163879 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hzf6v"] Nov 21 15:56:01 crc kubenswrapper[4774]: E1121 15:56:01.165389 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerName="extract-content" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.165405 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerName="extract-content" Nov 21 15:56:01 crc kubenswrapper[4774]: E1121 15:56:01.165464 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerName="registry-server" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.165477 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerName="registry-server" Nov 21 15:56:01 crc kubenswrapper[4774]: E1121 15:56:01.165506 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerName="extract-utilities" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.165542 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerName="extract-utilities" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.165955 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c24e40b-9885-48db-bbc3-bb5ddb5d1b39" containerName="registry-server" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.180288 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.199736 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hzf6v"] Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.273337 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfqzv\" (UniqueName: \"kubernetes.io/projected/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-kube-api-access-zfqzv\") pod \"redhat-marketplace-hzf6v\" (UID: \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\") " pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.273782 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-catalog-content\") pod \"redhat-marketplace-hzf6v\" (UID: \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\") " pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.274084 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-utilities\") pod \"redhat-marketplace-hzf6v\" (UID: \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\") " pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.375164 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-utilities\") pod \"redhat-marketplace-hzf6v\" (UID: \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\") " pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.375232 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfqzv\" (UniqueName: \"kubernetes.io/projected/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-kube-api-access-zfqzv\") pod \"redhat-marketplace-hzf6v\" (UID: \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\") " pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.375322 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-catalog-content\") pod \"redhat-marketplace-hzf6v\" (UID: \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\") " pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.375721 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-utilities\") pod \"redhat-marketplace-hzf6v\" (UID: \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\") " pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.375772 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-catalog-content\") pod \"redhat-marketplace-hzf6v\" (UID: \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\") " pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.395964 4774 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zfqzv\" (UniqueName: \"kubernetes.io/projected/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-kube-api-access-zfqzv\") pod \"redhat-marketplace-hzf6v\" (UID: \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\") " pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:01 crc kubenswrapper[4774]: I1121 15:56:01.509862 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:02 crc kubenswrapper[4774]: I1121 15:56:02.053802 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hzf6v"] Nov 21 15:56:02 crc kubenswrapper[4774]: I1121 15:56:02.948718 4774 generic.go:334] "Generic (PLEG): container finished" podID="1afa4ef8-acf8-4ea3-8fa3-5672833967b1" containerID="48e79bb77bc970844f2e16e203fc2c94d8a0c6bf9db791cdded70bf386e2c72d" exitCode=0 Nov 21 15:56:02 crc kubenswrapper[4774]: I1121 15:56:02.948847 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hzf6v" event={"ID":"1afa4ef8-acf8-4ea3-8fa3-5672833967b1","Type":"ContainerDied","Data":"48e79bb77bc970844f2e16e203fc2c94d8a0c6bf9db791cdded70bf386e2c72d"} Nov 21 15:56:02 crc kubenswrapper[4774]: I1121 15:56:02.949670 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hzf6v" event={"ID":"1afa4ef8-acf8-4ea3-8fa3-5672833967b1","Type":"ContainerStarted","Data":"fa273226d2761a7aad4de9b8c6f2387986709dcc9ca9bc03d7284523b7381426"} Nov 21 15:56:02 crc kubenswrapper[4774]: I1121 15:56:02.951445 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 15:56:04 crc kubenswrapper[4774]: I1121 15:56:04.975896 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hzf6v" event={"ID":"1afa4ef8-acf8-4ea3-8fa3-5672833967b1","Type":"ContainerStarted","Data":"acb59faba5ee2a6bcdb793750bb77de1a86af5f1e1c939b007d5e45fbb795283"} Nov 21 15:56:05 crc kubenswrapper[4774]: I1121 15:56:05.940356 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-srlj6"] Nov 21 15:56:05 crc kubenswrapper[4774]: I1121 15:56:05.943526 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-srlj6" Nov 21 15:56:05 crc kubenswrapper[4774]: I1121 15:56:05.958594 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-srlj6"] Nov 21 15:56:05 crc kubenswrapper[4774]: I1121 15:56:05.988349 4774 generic.go:334] "Generic (PLEG): container finished" podID="1afa4ef8-acf8-4ea3-8fa3-5672833967b1" containerID="acb59faba5ee2a6bcdb793750bb77de1a86af5f1e1c939b007d5e45fbb795283" exitCode=0 Nov 21 15:56:05 crc kubenswrapper[4774]: I1121 15:56:05.988397 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hzf6v" event={"ID":"1afa4ef8-acf8-4ea3-8fa3-5672833967b1","Type":"ContainerDied","Data":"acb59faba5ee2a6bcdb793750bb77de1a86af5f1e1c939b007d5e45fbb795283"} Nov 21 15:56:06 crc kubenswrapper[4774]: I1121 15:56:06.083976 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gd8ss\" (UniqueName: \"kubernetes.io/projected/8c206c22-5d8f-4e3b-be98-5b09e0fea701-kube-api-access-gd8ss\") pod \"community-operators-srlj6\" (UID: \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\") " pod="openshift-marketplace/community-operators-srlj6" Nov 21 15:56:06 crc kubenswrapper[4774]: I1121 15:56:06.084590 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c206c22-5d8f-4e3b-be98-5b09e0fea701-utilities\") pod \"community-operators-srlj6\" (UID: \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\") " pod="openshift-marketplace/community-operators-srlj6" Nov 21 15:56:06 crc kubenswrapper[4774]: I1121 15:56:06.084628 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c206c22-5d8f-4e3b-be98-5b09e0fea701-catalog-content\") pod \"community-operators-srlj6\" (UID: \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\") " pod="openshift-marketplace/community-operators-srlj6" Nov 21 15:56:06 crc kubenswrapper[4774]: I1121 15:56:06.187220 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c206c22-5d8f-4e3b-be98-5b09e0fea701-utilities\") pod \"community-operators-srlj6\" (UID: \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\") " pod="openshift-marketplace/community-operators-srlj6" Nov 21 15:56:06 crc kubenswrapper[4774]: I1121 15:56:06.187550 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c206c22-5d8f-4e3b-be98-5b09e0fea701-catalog-content\") pod \"community-operators-srlj6\" (UID: \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\") " pod="openshift-marketplace/community-operators-srlj6" Nov 21 15:56:06 crc kubenswrapper[4774]: I1121 15:56:06.187724 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gd8ss\" (UniqueName: \"kubernetes.io/projected/8c206c22-5d8f-4e3b-be98-5b09e0fea701-kube-api-access-gd8ss\") pod \"community-operators-srlj6\" (UID: \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\") " pod="openshift-marketplace/community-operators-srlj6" Nov 21 15:56:06 crc kubenswrapper[4774]: I1121 15:56:06.187880 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c206c22-5d8f-4e3b-be98-5b09e0fea701-utilities\") pod \"community-operators-srlj6\" (UID: 
\"8c206c22-5d8f-4e3b-be98-5b09e0fea701\") " pod="openshift-marketplace/community-operators-srlj6" Nov 21 15:56:06 crc kubenswrapper[4774]: I1121 15:56:06.188123 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c206c22-5d8f-4e3b-be98-5b09e0fea701-catalog-content\") pod \"community-operators-srlj6\" (UID: \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\") " pod="openshift-marketplace/community-operators-srlj6" Nov 21 15:56:06 crc kubenswrapper[4774]: I1121 15:56:06.208867 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gd8ss\" (UniqueName: \"kubernetes.io/projected/8c206c22-5d8f-4e3b-be98-5b09e0fea701-kube-api-access-gd8ss\") pod \"community-operators-srlj6\" (UID: \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\") " pod="openshift-marketplace/community-operators-srlj6" Nov 21 15:56:06 crc kubenswrapper[4774]: I1121 15:56:06.273139 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-srlj6" Nov 21 15:56:06 crc kubenswrapper[4774]: I1121 15:56:06.876786 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-srlj6"] Nov 21 15:56:06 crc kubenswrapper[4774]: I1121 15:56:06.999574 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srlj6" event={"ID":"8c206c22-5d8f-4e3b-be98-5b09e0fea701","Type":"ContainerStarted","Data":"36a2c5f445415f5af4aa68f8c8b52fbde1086ad1b498dddea14866591aab025e"} Nov 21 15:56:08 crc kubenswrapper[4774]: I1121 15:56:08.011112 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hzf6v" event={"ID":"1afa4ef8-acf8-4ea3-8fa3-5672833967b1","Type":"ContainerStarted","Data":"07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac"} Nov 21 15:56:08 crc kubenswrapper[4774]: I1121 15:56:08.012618 4774 generic.go:334] "Generic (PLEG): container finished" podID="8c206c22-5d8f-4e3b-be98-5b09e0fea701" containerID="e15af3a7f0cd5496c8eac408bbe8cdecf1ac91a6e63d16dc85293a1034ae72ef" exitCode=0 Nov 21 15:56:08 crc kubenswrapper[4774]: I1121 15:56:08.012664 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srlj6" event={"ID":"8c206c22-5d8f-4e3b-be98-5b09e0fea701","Type":"ContainerDied","Data":"e15af3a7f0cd5496c8eac408bbe8cdecf1ac91a6e63d16dc85293a1034ae72ef"} Nov 21 15:56:08 crc kubenswrapper[4774]: I1121 15:56:08.033018 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hzf6v" podStartSLOduration=3.181246529 podStartE2EDuration="7.032998787s" podCreationTimestamp="2025-11-21 15:56:01 +0000 UTC" firstStartedPulling="2025-11-21 15:56:02.951209285 +0000 UTC m=+6753.603408544" lastFinishedPulling="2025-11-21 15:56:06.802961543 +0000 UTC m=+6757.455160802" observedRunningTime="2025-11-21 15:56:08.027353026 +0000 UTC m=+6758.679552325" watchObservedRunningTime="2025-11-21 15:56:08.032998787 +0000 UTC m=+6758.685198046" Nov 21 15:56:08 crc kubenswrapper[4774]: I1121 15:56:08.093254 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 15:56:08 crc kubenswrapper[4774]: E1121 15:56:08.093525 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 15:56:10 crc kubenswrapper[4774]: I1121 15:56:10.036507 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srlj6" event={"ID":"8c206c22-5d8f-4e3b-be98-5b09e0fea701","Type":"ContainerStarted","Data":"6b7841fb3571a07488e7895f844d18a6c4b4b40b19bfa9a396b743f3bde9a87b"} Nov 21 15:56:11 crc kubenswrapper[4774]: I1121 15:56:11.510543 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:11 crc kubenswrapper[4774]: I1121 15:56:11.510940 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:11 crc kubenswrapper[4774]: I1121 15:56:11.567064 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:12 crc kubenswrapper[4774]: I1121 15:56:12.123185 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:13 crc kubenswrapper[4774]: I1121 15:56:13.070444 4774 generic.go:334] "Generic (PLEG): container finished" podID="8c206c22-5d8f-4e3b-be98-5b09e0fea701" containerID="6b7841fb3571a07488e7895f844d18a6c4b4b40b19bfa9a396b743f3bde9a87b" exitCode=0 Nov 21 15:56:13 crc kubenswrapper[4774]: I1121 15:56:13.071167 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srlj6" event={"ID":"8c206c22-5d8f-4e3b-be98-5b09e0fea701","Type":"ContainerDied","Data":"6b7841fb3571a07488e7895f844d18a6c4b4b40b19bfa9a396b743f3bde9a87b"} Nov 21 15:56:13 crc kubenswrapper[4774]: I1121 15:56:13.936645 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hzf6v"] Nov 21 15:56:14 crc kubenswrapper[4774]: I1121 15:56:14.085163 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hzf6v" podUID="1afa4ef8-acf8-4ea3-8fa3-5672833967b1" containerName="registry-server" containerID="cri-o://07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac" gracePeriod=2 Nov 21 15:56:14 crc kubenswrapper[4774]: I1121 15:56:14.652836 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hzf6v" Nov 21 15:56:14 crc kubenswrapper[4774]: I1121 15:56:14.817377 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-catalog-content\") pod \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\" (UID: \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\") " Nov 21 15:56:14 crc kubenswrapper[4774]: I1121 15:56:14.817431 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfqzv\" (UniqueName: \"kubernetes.io/projected/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-kube-api-access-zfqzv\") pod \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\" (UID: \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\") " Nov 21 15:56:14 crc kubenswrapper[4774]: I1121 15:56:14.817546 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-utilities\") pod \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\" (UID: \"1afa4ef8-acf8-4ea3-8fa3-5672833967b1\") " Nov 21 15:56:14 crc kubenswrapper[4774]: I1121 15:56:14.818891 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-utilities" (OuterVolumeSpecName: "utilities") pod "1afa4ef8-acf8-4ea3-8fa3-5672833967b1" (UID: "1afa4ef8-acf8-4ea3-8fa3-5672833967b1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:56:14 crc kubenswrapper[4774]: I1121 15:56:14.824071 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-kube-api-access-zfqzv" (OuterVolumeSpecName: "kube-api-access-zfqzv") pod "1afa4ef8-acf8-4ea3-8fa3-5672833967b1" (UID: "1afa4ef8-acf8-4ea3-8fa3-5672833967b1"). InnerVolumeSpecName "kube-api-access-zfqzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:56:14 crc kubenswrapper[4774]: I1121 15:56:14.838987 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1afa4ef8-acf8-4ea3-8fa3-5672833967b1" (UID: "1afa4ef8-acf8-4ea3-8fa3-5672833967b1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:56:14 crc kubenswrapper[4774]: I1121 15:56:14.920730 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:56:14 crc kubenswrapper[4774]: I1121 15:56:14.920793 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfqzv\" (UniqueName: \"kubernetes.io/projected/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-kube-api-access-zfqzv\") on node \"crc\" DevicePath \"\"" Nov 21 15:56:14 crc kubenswrapper[4774]: I1121 15:56:14.920809 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1afa4ef8-acf8-4ea3-8fa3-5672833967b1-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.097496 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srlj6" event={"ID":"8c206c22-5d8f-4e3b-be98-5b09e0fea701","Type":"ContainerStarted","Data":"18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3"} Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.105390 4774 generic.go:334] "Generic (PLEG): container finished" podID="1afa4ef8-acf8-4ea3-8fa3-5672833967b1" containerID="07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac" exitCode=0 Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.105444 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hzf6v" event={"ID":"1afa4ef8-acf8-4ea3-8fa3-5672833967b1","Type":"ContainerDied","Data":"07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac"} Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.105478 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hzf6v" event={"ID":"1afa4ef8-acf8-4ea3-8fa3-5672833967b1","Type":"ContainerDied","Data":"fa273226d2761a7aad4de9b8c6f2387986709dcc9ca9bc03d7284523b7381426"} Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.105523 4774 scope.go:117] "RemoveContainer" containerID="07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac" Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.105484 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hzf6v"
Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.119494 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-srlj6" podStartSLOduration=3.932576728 podStartE2EDuration="10.11947632s" podCreationTimestamp="2025-11-21 15:56:05 +0000 UTC" firstStartedPulling="2025-11-21 15:56:08.014475109 +0000 UTC m=+6758.666674368" lastFinishedPulling="2025-11-21 15:56:14.201374701 +0000 UTC m=+6764.853573960" observedRunningTime="2025-11-21 15:56:15.114423746 +0000 UTC m=+6765.766623015" watchObservedRunningTime="2025-11-21 15:56:15.11947632 +0000 UTC m=+6765.771675579"
Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.129019 4774 scope.go:117] "RemoveContainer" containerID="acb59faba5ee2a6bcdb793750bb77de1a86af5f1e1c939b007d5e45fbb795283"
Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.146478 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hzf6v"]
Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.154534 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hzf6v"]
Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.176750 4774 scope.go:117] "RemoveContainer" containerID="48e79bb77bc970844f2e16e203fc2c94d8a0c6bf9db791cdded70bf386e2c72d"
Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.208906 4774 scope.go:117] "RemoveContainer" containerID="07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac"
Nov 21 15:56:15 crc kubenswrapper[4774]: E1121 15:56:15.209575 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac\": container with ID starting with 07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac not found: ID does not exist" containerID="07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac"
Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.209730 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac"} err="failed to get container status \"07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac\": rpc error: code = NotFound desc = could not find container \"07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac\": container with ID starting with 07e46e2c77b11d0a253351cb1dd7e22a3c649b7007644a2b6745e739bf75f0ac not found: ID does not exist"
Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.209886 4774 scope.go:117] "RemoveContainer" containerID="acb59faba5ee2a6bcdb793750bb77de1a86af5f1e1c939b007d5e45fbb795283"
Nov 21 15:56:15 crc kubenswrapper[4774]: E1121 15:56:15.210438 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acb59faba5ee2a6bcdb793750bb77de1a86af5f1e1c939b007d5e45fbb795283\": container with ID starting with acb59faba5ee2a6bcdb793750bb77de1a86af5f1e1c939b007d5e45fbb795283 not found: ID does not exist" containerID="acb59faba5ee2a6bcdb793750bb77de1a86af5f1e1c939b007d5e45fbb795283"
Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.210472 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acb59faba5ee2a6bcdb793750bb77de1a86af5f1e1c939b007d5e45fbb795283"} err="failed to get container status \"acb59faba5ee2a6bcdb793750bb77de1a86af5f1e1c939b007d5e45fbb795283\": rpc error: code = NotFound desc = could not find container \"acb59faba5ee2a6bcdb793750bb77de1a86af5f1e1c939b007d5e45fbb795283\": container with ID starting with acb59faba5ee2a6bcdb793750bb77de1a86af5f1e1c939b007d5e45fbb795283 not found: ID does not exist"
Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.210498 4774 scope.go:117] "RemoveContainer" containerID="48e79bb77bc970844f2e16e203fc2c94d8a0c6bf9db791cdded70bf386e2c72d"
Nov 21 15:56:15 crc kubenswrapper[4774]: E1121 15:56:15.210980 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48e79bb77bc970844f2e16e203fc2c94d8a0c6bf9db791cdded70bf386e2c72d\": container with ID starting with 48e79bb77bc970844f2e16e203fc2c94d8a0c6bf9db791cdded70bf386e2c72d not found: ID does not exist" containerID="48e79bb77bc970844f2e16e203fc2c94d8a0c6bf9db791cdded70bf386e2c72d"
Nov 21 15:56:15 crc kubenswrapper[4774]: I1121 15:56:15.211005 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48e79bb77bc970844f2e16e203fc2c94d8a0c6bf9db791cdded70bf386e2c72d"} err="failed to get container status \"48e79bb77bc970844f2e16e203fc2c94d8a0c6bf9db791cdded70bf386e2c72d\": rpc error: code = NotFound desc = could not find container \"48e79bb77bc970844f2e16e203fc2c94d8a0c6bf9db791cdded70bf386e2c72d\": container with ID starting with 48e79bb77bc970844f2e16e203fc2c94d8a0c6bf9db791cdded70bf386e2c72d not found: ID does not exist"
Nov 21 15:56:16 crc kubenswrapper[4774]: I1121 15:56:16.105744 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1afa4ef8-acf8-4ea3-8fa3-5672833967b1" path="/var/lib/kubelet/pods/1afa4ef8-acf8-4ea3-8fa3-5672833967b1/volumes"
Nov 21 15:56:16 crc kubenswrapper[4774]: I1121 15:56:16.274372 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-srlj6"
Nov 21 15:56:16 crc kubenswrapper[4774]: I1121 15:56:16.274424 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-srlj6"
Nov 21 15:56:16 crc kubenswrapper[4774]: I1121 15:56:16.324416 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-srlj6"
Nov 21 15:56:23 crc kubenswrapper[4774]: I1121 15:56:23.093496 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268"
Nov 21 15:56:23 crc kubenswrapper[4774]: E1121 15:56:23.094572 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:56:26 crc kubenswrapper[4774]: I1121 15:56:26.348258 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-srlj6"
Nov 21 15:56:26 crc kubenswrapper[4774]: I1121 15:56:26.400965 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-srlj6"]
Nov 21 15:56:27 crc kubenswrapper[4774]: I1121 15:56:27.230205 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-srlj6" podUID="8c206c22-5d8f-4e3b-be98-5b09e0fea701" containerName="registry-server" containerID="cri-o://18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3" gracePeriod=2
Nov 21 15:56:27 crc kubenswrapper[4774]: I1121 15:56:27.746013 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-srlj6"
Nov 21 15:56:27 crc kubenswrapper[4774]: I1121 15:56:27.936698 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gd8ss\" (UniqueName: \"kubernetes.io/projected/8c206c22-5d8f-4e3b-be98-5b09e0fea701-kube-api-access-gd8ss\") pod \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\" (UID: \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\") "
Nov 21 15:56:27 crc kubenswrapper[4774]: I1121 15:56:27.936850 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c206c22-5d8f-4e3b-be98-5b09e0fea701-utilities\") pod \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\" (UID: \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\") "
Nov 21 15:56:27 crc kubenswrapper[4774]: I1121 15:56:27.936879 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c206c22-5d8f-4e3b-be98-5b09e0fea701-catalog-content\") pod \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\" (UID: \"8c206c22-5d8f-4e3b-be98-5b09e0fea701\") "
Nov 21 15:56:27 crc kubenswrapper[4774]: I1121 15:56:27.937888 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c206c22-5d8f-4e3b-be98-5b09e0fea701-utilities" (OuterVolumeSpecName: "utilities") pod "8c206c22-5d8f-4e3b-be98-5b09e0fea701" (UID: "8c206c22-5d8f-4e3b-be98-5b09e0fea701"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:56:27 crc kubenswrapper[4774]: I1121 15:56:27.942103 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c206c22-5d8f-4e3b-be98-5b09e0fea701-kube-api-access-gd8ss" (OuterVolumeSpecName: "kube-api-access-gd8ss") pod "8c206c22-5d8f-4e3b-be98-5b09e0fea701" (UID: "8c206c22-5d8f-4e3b-be98-5b09e0fea701"). InnerVolumeSpecName "kube-api-access-gd8ss". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 15:56:27 crc kubenswrapper[4774]: I1121 15:56:27.989760 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c206c22-5d8f-4e3b-be98-5b09e0fea701-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c206c22-5d8f-4e3b-be98-5b09e0fea701" (UID: "8c206c22-5d8f-4e3b-be98-5b09e0fea701"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.039712 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gd8ss\" (UniqueName: \"kubernetes.io/projected/8c206c22-5d8f-4e3b-be98-5b09e0fea701-kube-api-access-gd8ss\") on node \"crc\" DevicePath \"\""
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.039782 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c206c22-5d8f-4e3b-be98-5b09e0fea701-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.039807 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c206c22-5d8f-4e3b-be98-5b09e0fea701-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.254184 4774 generic.go:334] "Generic (PLEG): container finished" podID="8c206c22-5d8f-4e3b-be98-5b09e0fea701" containerID="18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3" exitCode=0
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.254265 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-srlj6"
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.254261 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srlj6" event={"ID":"8c206c22-5d8f-4e3b-be98-5b09e0fea701","Type":"ContainerDied","Data":"18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3"}
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.254782 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srlj6" event={"ID":"8c206c22-5d8f-4e3b-be98-5b09e0fea701","Type":"ContainerDied","Data":"36a2c5f445415f5af4aa68f8c8b52fbde1086ad1b498dddea14866591aab025e"}
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.254806 4774 scope.go:117] "RemoveContainer" containerID="18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3"
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.300870 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-srlj6"]
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.310201 4774 scope.go:117] "RemoveContainer" containerID="6b7841fb3571a07488e7895f844d18a6c4b4b40b19bfa9a396b743f3bde9a87b"
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.315723 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-srlj6"]
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.336713 4774 scope.go:117] "RemoveContainer" containerID="e15af3a7f0cd5496c8eac408bbe8cdecf1ac91a6e63d16dc85293a1034ae72ef"
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.384501 4774 scope.go:117] "RemoveContainer" containerID="18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3"
Nov 21 15:56:28 crc kubenswrapper[4774]: E1121 15:56:28.385289 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3\": container with ID starting with 18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3 not found: ID does not exist" containerID="18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3"
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.385377 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3"} err="failed to get container status \"18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3\": rpc error: code = NotFound desc = could not find container \"18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3\": container with ID starting with 18284ac9f62125c16dab3be135d61affb4116259ebe716497ade4a8d023d6fb3 not found: ID does not exist"
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.385432 4774 scope.go:117] "RemoveContainer" containerID="6b7841fb3571a07488e7895f844d18a6c4b4b40b19bfa9a396b743f3bde9a87b"
Nov 21 15:56:28 crc kubenswrapper[4774]: E1121 15:56:28.385910 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b7841fb3571a07488e7895f844d18a6c4b4b40b19bfa9a396b743f3bde9a87b\": container with ID starting with 6b7841fb3571a07488e7895f844d18a6c4b4b40b19bfa9a396b743f3bde9a87b not found: ID does not exist" containerID="6b7841fb3571a07488e7895f844d18a6c4b4b40b19bfa9a396b743f3bde9a87b"
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.385953 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b7841fb3571a07488e7895f844d18a6c4b4b40b19bfa9a396b743f3bde9a87b"} err="failed to get container status \"6b7841fb3571a07488e7895f844d18a6c4b4b40b19bfa9a396b743f3bde9a87b\": rpc error: code = NotFound desc = could not find container \"6b7841fb3571a07488e7895f844d18a6c4b4b40b19bfa9a396b743f3bde9a87b\": container with ID starting with 6b7841fb3571a07488e7895f844d18a6c4b4b40b19bfa9a396b743f3bde9a87b not found: ID does not exist"
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.385990 4774 scope.go:117] "RemoveContainer" containerID="e15af3a7f0cd5496c8eac408bbe8cdecf1ac91a6e63d16dc85293a1034ae72ef"
Nov 21 15:56:28 crc kubenswrapper[4774]: E1121 15:56:28.386363 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e15af3a7f0cd5496c8eac408bbe8cdecf1ac91a6e63d16dc85293a1034ae72ef\": container with ID starting with e15af3a7f0cd5496c8eac408bbe8cdecf1ac91a6e63d16dc85293a1034ae72ef not found: ID does not exist" containerID="e15af3a7f0cd5496c8eac408bbe8cdecf1ac91a6e63d16dc85293a1034ae72ef"
Nov 21 15:56:28 crc kubenswrapper[4774]: I1121 15:56:28.386409 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e15af3a7f0cd5496c8eac408bbe8cdecf1ac91a6e63d16dc85293a1034ae72ef"} err="failed to get container status \"e15af3a7f0cd5496c8eac408bbe8cdecf1ac91a6e63d16dc85293a1034ae72ef\": rpc error: code = NotFound desc = could not find container \"e15af3a7f0cd5496c8eac408bbe8cdecf1ac91a6e63d16dc85293a1034ae72ef\": container with ID starting with e15af3a7f0cd5496c8eac408bbe8cdecf1ac91a6e63d16dc85293a1034ae72ef not found: ID does not exist"
Nov 21 15:56:30 crc kubenswrapper[4774]: I1121 15:56:30.108854 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c206c22-5d8f-4e3b-be98-5b09e0fea701" path="/var/lib/kubelet/pods/8c206c22-5d8f-4e3b-be98-5b09e0fea701/volumes"
Nov 21 15:56:35 crc kubenswrapper[4774]: I1121 15:56:35.093669 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268"
Nov 21 15:56:35 crc kubenswrapper[4774]: E1121 15:56:35.094498 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:56:49 crc kubenswrapper[4774]: I1121 15:56:49.093589 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268"
Nov 21 15:56:49 crc kubenswrapper[4774]: E1121 15:56:49.095081 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:57:01 crc kubenswrapper[4774]: I1121 15:57:01.093494 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268"
Nov 21 15:57:01 crc kubenswrapper[4774]: E1121 15:57:01.094291 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:57:12 crc kubenswrapper[4774]: I1121 15:57:12.093314 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268"
Nov 21 15:57:12 crc kubenswrapper[4774]: E1121 15:57:12.094301 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:57:27 crc kubenswrapper[4774]: I1121 15:57:27.093760 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268"
Nov 21 15:57:27 crc kubenswrapper[4774]: E1121 15:57:27.094955 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:57:39 crc kubenswrapper[4774]: I1121 15:57:39.093689 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268"
Nov 21 15:57:39 crc kubenswrapper[4774]: E1121 15:57:39.094490 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:57:50 crc kubenswrapper[4774]: I1121 15:57:50.099017 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268"
Nov 21 15:57:50 crc kubenswrapper[4774]: E1121 15:57:50.099859 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 15:57:55 crc kubenswrapper[4774]: I1121 15:57:55.056722 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-create-9sr4x"]
Nov 21 15:57:55 crc kubenswrapper[4774]: I1121 15:57:55.067539 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-create-9sr4x"]
Nov 21 15:57:56 crc kubenswrapper[4774]: I1121 15:57:56.033770 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-4f3c-account-create-f99kd"]
Nov 21 15:57:56 crc kubenswrapper[4774]: I1121 15:57:56.046124 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-4f3c-account-create-f99kd"]
Nov 21 15:57:56 crc kubenswrapper[4774]: I1121 15:57:56.105808 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab64234b-0075-4e20-9b12-f7282f5a966e" path="/var/lib/kubelet/pods/ab64234b-0075-4e20-9b12-f7282f5a966e/volumes"
Nov 21 15:57:56 crc kubenswrapper[4774]: I1121 15:57:56.116319 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb53ff10-9a77-4bac-ba20-1c93bb97973b" path="/var/lib/kubelet/pods/fb53ff10-9a77-4bac-ba20-1c93bb97973b/volumes"
Nov 21 15:58:03 crc kubenswrapper[4774]: I1121 15:58:03.093992 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268"
Nov 21 15:58:04 crc kubenswrapper[4774]: I1121 15:58:04.481092 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"3a2ff5bc3df476d6aa0bbcbb613be871d0c77839474cd4648a29a7332c51ade6"}
Nov 21 15:58:08 crc kubenswrapper[4774]: I1121 15:58:08.032227 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/aodh-db-sync-jwdbd"]
Nov 21 15:58:08 crc kubenswrapper[4774]: I1121 15:58:08.041812 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/aodh-db-sync-jwdbd"]
Nov 21 15:58:08 crc kubenswrapper[4774]: I1121 15:58:08.109112 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="951576d9-41e4-44c5-8c46-53b97d979449" path="/var/lib/kubelet/pods/951576d9-41e4-44c5-8c46-53b97d979449/volumes"
Nov 21 15:58:41 crc kubenswrapper[4774]: I1121 15:58:41.044707 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-3c58-account-create-tnkjp"]
Nov 21 15:58:41 crc kubenswrapper[4774]: I1121 15:58:41.055843 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-create-6gh4k"]
Nov 21 15:58:41 crc kubenswrapper[4774]: I1121 15:58:41.065201 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-3c58-account-create-tnkjp"]
Nov 21 15:58:41 crc kubenswrapper[4774]: I1121 15:58:41.075918 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-create-6gh4k"]
Nov 21 15:58:42 crc kubenswrapper[4774]: I1121 15:58:42.105869 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e9ab910-701e-4dcc-baae-ef62322dc2da" path="/var/lib/kubelet/pods/7e9ab910-701e-4dcc-baae-ef62322dc2da/volumes"
Nov 21 15:58:42 crc kubenswrapper[4774]: I1121 15:58:42.106580 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf189446-356a-40a4-bea0-52433af962d0" path="/var/lib/kubelet/pods/bf189446-356a-40a4-bea0-52433af962d0/volumes"
Nov 21 15:58:52 crc kubenswrapper[4774]: I1121 15:58:52.242102 4774 scope.go:117] "RemoveContainer" containerID="0e0b0b0ffb31c124ec5331935fb0e508650cf594772babd684b0d5230e242873"
Nov 21 15:58:52 crc kubenswrapper[4774]: I1121 15:58:52.276953 4774 scope.go:117] "RemoveContainer" containerID="499198ccabeadbbfa0cccad0b00e94d47cf4e7d5f05f1dc15a0e780b1390e209"
Nov 21 15:58:52 crc kubenswrapper[4774]: I1121 15:58:52.342202 4774 scope.go:117] "RemoveContainer" containerID="87d7ff61c61be89e9ed17e17b4a4618b8a3b6a228fd79946bd42fad45ef7391c"
Nov 21 15:58:52 crc kubenswrapper[4774]: I1121 15:58:52.399908 4774 scope.go:117] "RemoveContainer" containerID="c610da57df3c95c435e2046d9dc916bc2f234851566fbc953fc57d91dd1a7971"
Nov 21 15:58:52 crc kubenswrapper[4774]: I1121 15:58:52.441494 4774 scope.go:117] "RemoveContainer" containerID="62d05e60bbe92a165f70f4fa6e83f3e2e6365eb1eaef820f650fc05f9ba0fdb1"
Nov 21 15:59:02 crc kubenswrapper[4774]: I1121 15:59:02.040164 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/manila-db-sync-rdhgd"]
Nov 21 15:59:02 crc kubenswrapper[4774]: I1121 15:59:02.048517 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/manila-db-sync-rdhgd"]
Nov 21 15:59:02 crc kubenswrapper[4774]: I1121 15:59:02.108670 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b60322c-4057-471e-9af8-c69b88f7df9f" path="/var/lib/kubelet/pods/4b60322c-4057-471e-9af8-c69b88f7df9f/volumes"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.839450 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-r4xc9"]
Nov 21 15:59:27 crc kubenswrapper[4774]: E1121 15:59:27.843441 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c206c22-5d8f-4e3b-be98-5b09e0fea701" containerName="registry-server"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.843466 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c206c22-5d8f-4e3b-be98-5b09e0fea701" containerName="registry-server"
Nov 21 15:59:27 crc kubenswrapper[4774]: E1121 15:59:27.843490 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1afa4ef8-acf8-4ea3-8fa3-5672833967b1" containerName="extract-content"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.843498 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1afa4ef8-acf8-4ea3-8fa3-5672833967b1" containerName="extract-content"
Nov 21 15:59:27 crc kubenswrapper[4774]: E1121 15:59:27.843519 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1afa4ef8-acf8-4ea3-8fa3-5672833967b1" containerName="extract-utilities"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.843527 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1afa4ef8-acf8-4ea3-8fa3-5672833967b1" containerName="extract-utilities"
Nov 21 15:59:27 crc kubenswrapper[4774]: E1121 15:59:27.843539 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c206c22-5d8f-4e3b-be98-5b09e0fea701" containerName="extract-content"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.843544 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c206c22-5d8f-4e3b-be98-5b09e0fea701" containerName="extract-content"
Nov 21 15:59:27 crc kubenswrapper[4774]: E1121 15:59:27.843565 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c206c22-5d8f-4e3b-be98-5b09e0fea701" containerName="extract-utilities"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.843571 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c206c22-5d8f-4e3b-be98-5b09e0fea701" containerName="extract-utilities"
Nov 21 15:59:27 crc kubenswrapper[4774]: E1121 15:59:27.843580 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1afa4ef8-acf8-4ea3-8fa3-5672833967b1" containerName="registry-server"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.843585 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1afa4ef8-acf8-4ea3-8fa3-5672833967b1" containerName="registry-server"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.843882 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c206c22-5d8f-4e3b-be98-5b09e0fea701" containerName="registry-server"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.843908 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="1afa4ef8-acf8-4ea3-8fa3-5672833967b1" containerName="registry-server"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.845522 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.862363 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r4xc9"]
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.959915 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a111e66f-c068-43e9-a618-3c8310ac67c7-catalog-content\") pod \"certified-operators-r4xc9\" (UID: \"a111e66f-c068-43e9-a618-3c8310ac67c7\") " pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.960036 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxkf8\" (UniqueName: \"kubernetes.io/projected/a111e66f-c068-43e9-a618-3c8310ac67c7-kube-api-access-vxkf8\") pod \"certified-operators-r4xc9\" (UID: \"a111e66f-c068-43e9-a618-3c8310ac67c7\") " pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:27 crc kubenswrapper[4774]: I1121 15:59:27.960132 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a111e66f-c068-43e9-a618-3c8310ac67c7-utilities\") pod \"certified-operators-r4xc9\" (UID: \"a111e66f-c068-43e9-a618-3c8310ac67c7\") " pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:28 crc kubenswrapper[4774]: I1121 15:59:28.062518 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a111e66f-c068-43e9-a618-3c8310ac67c7-catalog-content\") pod \"certified-operators-r4xc9\" (UID: \"a111e66f-c068-43e9-a618-3c8310ac67c7\") " pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:28 crc kubenswrapper[4774]: I1121 15:59:28.062612 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxkf8\" (UniqueName: \"kubernetes.io/projected/a111e66f-c068-43e9-a618-3c8310ac67c7-kube-api-access-vxkf8\") pod \"certified-operators-r4xc9\" (UID: \"a111e66f-c068-43e9-a618-3c8310ac67c7\") " pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:28 crc kubenswrapper[4774]: I1121 15:59:28.062666 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a111e66f-c068-43e9-a618-3c8310ac67c7-utilities\") pod \"certified-operators-r4xc9\" (UID: \"a111e66f-c068-43e9-a618-3c8310ac67c7\") " pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:28 crc kubenswrapper[4774]: I1121 15:59:28.063278 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a111e66f-c068-43e9-a618-3c8310ac67c7-utilities\") pod \"certified-operators-r4xc9\" (UID: \"a111e66f-c068-43e9-a618-3c8310ac67c7\") " pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:28 crc kubenswrapper[4774]: I1121 15:59:28.063362 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a111e66f-c068-43e9-a618-3c8310ac67c7-catalog-content\") pod \"certified-operators-r4xc9\" (UID: \"a111e66f-c068-43e9-a618-3c8310ac67c7\") " pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:28 crc kubenswrapper[4774]: I1121 15:59:28.087382 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxkf8\" (UniqueName: \"kubernetes.io/projected/a111e66f-c068-43e9-a618-3c8310ac67c7-kube-api-access-vxkf8\") pod \"certified-operators-r4xc9\" (UID: \"a111e66f-c068-43e9-a618-3c8310ac67c7\") " pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:28 crc kubenswrapper[4774]: I1121 15:59:28.173970 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:28 crc kubenswrapper[4774]: I1121 15:59:28.866336 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r4xc9"]
Nov 21 15:59:29 crc kubenswrapper[4774]: I1121 15:59:29.523518 4774 generic.go:334] "Generic (PLEG): container finished" podID="a111e66f-c068-43e9-a618-3c8310ac67c7" containerID="017c6574f100502fdbc3b0564880da51d00fb2f4d750f93c0a8e5b619830fb9b" exitCode=0
Nov 21 15:59:29 crc kubenswrapper[4774]: I1121 15:59:29.523580 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4xc9" event={"ID":"a111e66f-c068-43e9-a618-3c8310ac67c7","Type":"ContainerDied","Data":"017c6574f100502fdbc3b0564880da51d00fb2f4d750f93c0a8e5b619830fb9b"}
Nov 21 15:59:29 crc kubenswrapper[4774]: I1121 15:59:29.523950 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4xc9" event={"ID":"a111e66f-c068-43e9-a618-3c8310ac67c7","Type":"ContainerStarted","Data":"fc7c111ef4498904c02e9c8331978fcfb910f776d6c0f350a994c0fddec546ad"}
Nov 21 15:59:32 crc kubenswrapper[4774]: I1121 15:59:32.560119 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4xc9" event={"ID":"a111e66f-c068-43e9-a618-3c8310ac67c7","Type":"ContainerStarted","Data":"f44fdc197d64b5dbdfe33c56bf5b7a8e2279dc83b91206686d096f775fd193e0"}
Nov 21 15:59:35 crc kubenswrapper[4774]: I1121 15:59:35.598882 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4xc9" event={"ID":"a111e66f-c068-43e9-a618-3c8310ac67c7","Type":"ContainerDied","Data":"f44fdc197d64b5dbdfe33c56bf5b7a8e2279dc83b91206686d096f775fd193e0"}
Nov 21 15:59:35 crc kubenswrapper[4774]: I1121 15:59:35.598985 4774 generic.go:334] "Generic (PLEG): container finished" podID="a111e66f-c068-43e9-a618-3c8310ac67c7" containerID="f44fdc197d64b5dbdfe33c56bf5b7a8e2279dc83b91206686d096f775fd193e0" exitCode=0
Nov 21 15:59:37 crc kubenswrapper[4774]: I1121 15:59:37.629183 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4xc9" event={"ID":"a111e66f-c068-43e9-a618-3c8310ac67c7","Type":"ContainerStarted","Data":"c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d"}
Nov 21 15:59:37 crc kubenswrapper[4774]: I1121 15:59:37.656717 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-r4xc9" podStartSLOduration=3.647036853 podStartE2EDuration="10.656698353s" podCreationTimestamp="2025-11-21 15:59:27 +0000 UTC" firstStartedPulling="2025-11-21 15:59:29.526856174 +0000 UTC m=+6960.179055433" lastFinishedPulling="2025-11-21 15:59:36.536517674 +0000 UTC m=+6967.188716933" observedRunningTime="2025-11-21 15:59:37.648414706 +0000 UTC m=+6968.300613985" watchObservedRunningTime="2025-11-21 15:59:37.656698353 +0000 UTC m=+6968.308897612"
Nov 21 15:59:38 crc kubenswrapper[4774]: I1121 15:59:38.175309 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:38 crc kubenswrapper[4774]: I1121 15:59:38.175356 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:38 crc kubenswrapper[4774]: I1121 15:59:38.246927 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:48 crc kubenswrapper[4774]: I1121 15:59:48.250420 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:48 crc kubenswrapper[4774]: I1121 15:59:48.304150 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r4xc9"]
Nov 21 15:59:48 crc kubenswrapper[4774]: I1121 15:59:48.745048 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-r4xc9" podUID="a111e66f-c068-43e9-a618-3c8310ac67c7" containerName="registry-server" containerID="cri-o://c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d" gracePeriod=2
Nov 21 15:59:48 crc kubenswrapper[4774]: E1121 15:59:48.938851 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda111e66f_c068_43e9_a618_3c8310ac67c7.slice/crio-conmon-c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d.scope\": RecentStats: unable to find data in memory cache]"
Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.233626 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r4xc9"
Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.357614 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a111e66f-c068-43e9-a618-3c8310ac67c7-catalog-content\") pod \"a111e66f-c068-43e9-a618-3c8310ac67c7\" (UID: \"a111e66f-c068-43e9-a618-3c8310ac67c7\") "
Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.357868 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxkf8\" (UniqueName: \"kubernetes.io/projected/a111e66f-c068-43e9-a618-3c8310ac67c7-kube-api-access-vxkf8\") pod \"a111e66f-c068-43e9-a618-3c8310ac67c7\" (UID: \"a111e66f-c068-43e9-a618-3c8310ac67c7\") "
Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.358066 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a111e66f-c068-43e9-a618-3c8310ac67c7-utilities\") pod \"a111e66f-c068-43e9-a618-3c8310ac67c7\" (UID: \"a111e66f-c068-43e9-a618-3c8310ac67c7\") "
Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.359632 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a111e66f-c068-43e9-a618-3c8310ac67c7-utilities" (OuterVolumeSpecName: "utilities") pod "a111e66f-c068-43e9-a618-3c8310ac67c7" (UID: "a111e66f-c068-43e9-a618-3c8310ac67c7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.366099 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a111e66f-c068-43e9-a618-3c8310ac67c7-kube-api-access-vxkf8" (OuterVolumeSpecName: "kube-api-access-vxkf8") pod "a111e66f-c068-43e9-a618-3c8310ac67c7" (UID: "a111e66f-c068-43e9-a618-3c8310ac67c7"). InnerVolumeSpecName "kube-api-access-vxkf8". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.409966 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a111e66f-c068-43e9-a618-3c8310ac67c7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a111e66f-c068-43e9-a618-3c8310ac67c7" (UID: "a111e66f-c068-43e9-a618-3c8310ac67c7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.460346 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a111e66f-c068-43e9-a618-3c8310ac67c7-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.460600 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a111e66f-c068-43e9-a618-3c8310ac67c7-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.460612 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxkf8\" (UniqueName: \"kubernetes.io/projected/a111e66f-c068-43e9-a618-3c8310ac67c7-kube-api-access-vxkf8\") on node \"crc\" DevicePath \"\"" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.756908 4774 generic.go:334] "Generic (PLEG): container finished" podID="a111e66f-c068-43e9-a618-3c8310ac67c7" containerID="c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d" exitCode=0 Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.756963 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4xc9" event={"ID":"a111e66f-c068-43e9-a618-3c8310ac67c7","Type":"ContainerDied","Data":"c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d"} Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.757003 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r4xc9" event={"ID":"a111e66f-c068-43e9-a618-3c8310ac67c7","Type":"ContainerDied","Data":"fc7c111ef4498904c02e9c8331978fcfb910f776d6c0f350a994c0fddec546ad"} Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.757016 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-r4xc9" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.757026 4774 scope.go:117] "RemoveContainer" containerID="c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.779643 4774 scope.go:117] "RemoveContainer" containerID="f44fdc197d64b5dbdfe33c56bf5b7a8e2279dc83b91206686d096f775fd193e0" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.803743 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r4xc9"] Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.817262 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-r4xc9"] Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.820510 4774 scope.go:117] "RemoveContainer" containerID="017c6574f100502fdbc3b0564880da51d00fb2f4d750f93c0a8e5b619830fb9b" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.853537 4774 scope.go:117] "RemoveContainer" containerID="c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d" Nov 21 15:59:49 crc kubenswrapper[4774]: E1121 15:59:49.854179 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d\": container with ID starting with c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d not found: ID does not exist" containerID="c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.854216 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d"} err="failed to get container status \"c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d\": rpc error: code = NotFound desc = could not find container \"c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d\": container with ID starting with c219ea1b36197f25dc617282ef08e2fb3509f013db16295d1e343b2a3166f05d not found: ID does not exist" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.854243 4774 scope.go:117] "RemoveContainer" containerID="f44fdc197d64b5dbdfe33c56bf5b7a8e2279dc83b91206686d096f775fd193e0" Nov 21 15:59:49 crc kubenswrapper[4774]: E1121 15:59:49.854769 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f44fdc197d64b5dbdfe33c56bf5b7a8e2279dc83b91206686d096f775fd193e0\": container with ID starting with f44fdc197d64b5dbdfe33c56bf5b7a8e2279dc83b91206686d096f775fd193e0 not found: ID does not exist" containerID="f44fdc197d64b5dbdfe33c56bf5b7a8e2279dc83b91206686d096f775fd193e0" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.854811 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f44fdc197d64b5dbdfe33c56bf5b7a8e2279dc83b91206686d096f775fd193e0"} err="failed to get container status \"f44fdc197d64b5dbdfe33c56bf5b7a8e2279dc83b91206686d096f775fd193e0\": rpc error: code = NotFound desc = could not find container \"f44fdc197d64b5dbdfe33c56bf5b7a8e2279dc83b91206686d096f775fd193e0\": container with ID starting with f44fdc197d64b5dbdfe33c56bf5b7a8e2279dc83b91206686d096f775fd193e0 not found: ID does not exist" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.854863 4774 scope.go:117] "RemoveContainer" 
containerID="017c6574f100502fdbc3b0564880da51d00fb2f4d750f93c0a8e5b619830fb9b" Nov 21 15:59:49 crc kubenswrapper[4774]: E1121 15:59:49.855303 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"017c6574f100502fdbc3b0564880da51d00fb2f4d750f93c0a8e5b619830fb9b\": container with ID starting with 017c6574f100502fdbc3b0564880da51d00fb2f4d750f93c0a8e5b619830fb9b not found: ID does not exist" containerID="017c6574f100502fdbc3b0564880da51d00fb2f4d750f93c0a8e5b619830fb9b" Nov 21 15:59:49 crc kubenswrapper[4774]: I1121 15:59:49.855327 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"017c6574f100502fdbc3b0564880da51d00fb2f4d750f93c0a8e5b619830fb9b"} err="failed to get container status \"017c6574f100502fdbc3b0564880da51d00fb2f4d750f93c0a8e5b619830fb9b\": rpc error: code = NotFound desc = could not find container \"017c6574f100502fdbc3b0564880da51d00fb2f4d750f93c0a8e5b619830fb9b\": container with ID starting with 017c6574f100502fdbc3b0564880da51d00fb2f4d750f93c0a8e5b619830fb9b not found: ID does not exist" Nov 21 15:59:50 crc kubenswrapper[4774]: I1121 15:59:50.130922 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a111e66f-c068-43e9-a618-3c8310ac67c7" path="/var/lib/kubelet/pods/a111e66f-c068-43e9-a618-3c8310ac67c7/volumes" Nov 21 15:59:52 crc kubenswrapper[4774]: I1121 15:59:52.647187 4774 scope.go:117] "RemoveContainer" containerID="d81a2c8ac0e573f67d402c42dbe491e22650d75817a2e42cdff69d30cc7439a0" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.179936 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv"] Nov 21 16:00:00 crc kubenswrapper[4774]: E1121 16:00:00.180937 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a111e66f-c068-43e9-a618-3c8310ac67c7" containerName="registry-server" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.180955 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a111e66f-c068-43e9-a618-3c8310ac67c7" containerName="registry-server" Nov 21 16:00:00 crc kubenswrapper[4774]: E1121 16:00:00.180977 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a111e66f-c068-43e9-a618-3c8310ac67c7" containerName="extract-utilities" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.180985 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a111e66f-c068-43e9-a618-3c8310ac67c7" containerName="extract-utilities" Nov 21 16:00:00 crc kubenswrapper[4774]: E1121 16:00:00.181008 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a111e66f-c068-43e9-a618-3c8310ac67c7" containerName="extract-content" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.181016 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="a111e66f-c068-43e9-a618-3c8310ac67c7" containerName="extract-content" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.181324 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="a111e66f-c068-43e9-a618-3c8310ac67c7" containerName="registry-server" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.182359 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.184989 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.185083 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.196614 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv"] Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.291391 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afc30023-280b-430c-a86d-3655a938905e-secret-volume\") pod \"collect-profiles-29395680-nhsbv\" (UID: \"afc30023-280b-430c-a86d-3655a938905e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.291587 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nb5dv\" (UniqueName: \"kubernetes.io/projected/afc30023-280b-430c-a86d-3655a938905e-kube-api-access-nb5dv\") pod \"collect-profiles-29395680-nhsbv\" (UID: \"afc30023-280b-430c-a86d-3655a938905e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.291623 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afc30023-280b-430c-a86d-3655a938905e-config-volume\") pod \"collect-profiles-29395680-nhsbv\" (UID: \"afc30023-280b-430c-a86d-3655a938905e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.393856 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nb5dv\" (UniqueName: \"kubernetes.io/projected/afc30023-280b-430c-a86d-3655a938905e-kube-api-access-nb5dv\") pod \"collect-profiles-29395680-nhsbv\" (UID: \"afc30023-280b-430c-a86d-3655a938905e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.395076 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afc30023-280b-430c-a86d-3655a938905e-config-volume\") pod \"collect-profiles-29395680-nhsbv\" (UID: \"afc30023-280b-430c-a86d-3655a938905e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.395302 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afc30023-280b-430c-a86d-3655a938905e-secret-volume\") pod \"collect-profiles-29395680-nhsbv\" (UID: \"afc30023-280b-430c-a86d-3655a938905e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.395393 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afc30023-280b-430c-a86d-3655a938905e-config-volume\") pod 
\"collect-profiles-29395680-nhsbv\" (UID: \"afc30023-280b-430c-a86d-3655a938905e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.411884 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afc30023-280b-430c-a86d-3655a938905e-secret-volume\") pod \"collect-profiles-29395680-nhsbv\" (UID: \"afc30023-280b-430c-a86d-3655a938905e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.412131 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nb5dv\" (UniqueName: \"kubernetes.io/projected/afc30023-280b-430c-a86d-3655a938905e-kube-api-access-nb5dv\") pod \"collect-profiles-29395680-nhsbv\" (UID: \"afc30023-280b-430c-a86d-3655a938905e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:00 crc kubenswrapper[4774]: I1121 16:00:00.508697 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:01 crc kubenswrapper[4774]: I1121 16:00:01.106662 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv"] Nov 21 16:00:01 crc kubenswrapper[4774]: I1121 16:00:01.873710 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" event={"ID":"afc30023-280b-430c-a86d-3655a938905e","Type":"ContainerStarted","Data":"350ead7a4de4dfa93aa2dcd8c09bf42b4e56b2a310b2b9f48cc266d209c15b3b"} Nov 21 16:00:01 crc kubenswrapper[4774]: I1121 16:00:01.874085 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" event={"ID":"afc30023-280b-430c-a86d-3655a938905e","Type":"ContainerStarted","Data":"6aa8cc084f8b9233846401cdbeeb9363980f0347bf13a3cb5e165783ea723719"} Nov 21 16:00:01 crc kubenswrapper[4774]: I1121 16:00:01.891443 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" podStartSLOduration=1.89141781 podStartE2EDuration="1.89141781s" podCreationTimestamp="2025-11-21 16:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 16:00:01.887636662 +0000 UTC m=+6992.539835981" watchObservedRunningTime="2025-11-21 16:00:01.89141781 +0000 UTC m=+6992.543617099" Nov 21 16:00:02 crc kubenswrapper[4774]: I1121 16:00:02.885722 4774 generic.go:334] "Generic (PLEG): container finished" podID="afc30023-280b-430c-a86d-3655a938905e" containerID="350ead7a4de4dfa93aa2dcd8c09bf42b4e56b2a310b2b9f48cc266d209c15b3b" exitCode=0 Nov 21 16:00:02 crc kubenswrapper[4774]: I1121 16:00:02.885858 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" event={"ID":"afc30023-280b-430c-a86d-3655a938905e","Type":"ContainerDied","Data":"350ead7a4de4dfa93aa2dcd8c09bf42b4e56b2a310b2b9f48cc266d209c15b3b"} Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.281650 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.385685 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afc30023-280b-430c-a86d-3655a938905e-secret-volume\") pod \"afc30023-280b-430c-a86d-3655a938905e\" (UID: \"afc30023-280b-430c-a86d-3655a938905e\") " Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.385904 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nb5dv\" (UniqueName: \"kubernetes.io/projected/afc30023-280b-430c-a86d-3655a938905e-kube-api-access-nb5dv\") pod \"afc30023-280b-430c-a86d-3655a938905e\" (UID: \"afc30023-280b-430c-a86d-3655a938905e\") " Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.386021 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afc30023-280b-430c-a86d-3655a938905e-config-volume\") pod \"afc30023-280b-430c-a86d-3655a938905e\" (UID: \"afc30023-280b-430c-a86d-3655a938905e\") " Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.386790 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afc30023-280b-430c-a86d-3655a938905e-config-volume" (OuterVolumeSpecName: "config-volume") pod "afc30023-280b-430c-a86d-3655a938905e" (UID: "afc30023-280b-430c-a86d-3655a938905e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.392931 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc30023-280b-430c-a86d-3655a938905e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "afc30023-280b-430c-a86d-3655a938905e" (UID: "afc30023-280b-430c-a86d-3655a938905e"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.393306 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afc30023-280b-430c-a86d-3655a938905e-kube-api-access-nb5dv" (OuterVolumeSpecName: "kube-api-access-nb5dv") pod "afc30023-280b-430c-a86d-3655a938905e" (UID: "afc30023-280b-430c-a86d-3655a938905e"). InnerVolumeSpecName "kube-api-access-nb5dv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.489299 4774 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afc30023-280b-430c-a86d-3655a938905e-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.489347 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nb5dv\" (UniqueName: \"kubernetes.io/projected/afc30023-280b-430c-a86d-3655a938905e-kube-api-access-nb5dv\") on node \"crc\" DevicePath \"\"" Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.489357 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afc30023-280b-430c-a86d-3655a938905e-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.914283 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" event={"ID":"afc30023-280b-430c-a86d-3655a938905e","Type":"ContainerDied","Data":"6aa8cc084f8b9233846401cdbeeb9363980f0347bf13a3cb5e165783ea723719"} Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.914329 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6aa8cc084f8b9233846401cdbeeb9363980f0347bf13a3cb5e165783ea723719" Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.914397 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv" Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.973699 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9"] Nov 21 16:00:04 crc kubenswrapper[4774]: I1121 16:00:04.982255 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395635-hxld9"] Nov 21 16:00:06 crc kubenswrapper[4774]: I1121 16:00:06.111182 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a747b3c-1ce8-435b-81de-2736a1a28e60" path="/var/lib/kubelet/pods/6a747b3c-1ce8-435b-81de-2736a1a28e60/volumes" Nov 21 16:00:29 crc kubenswrapper[4774]: I1121 16:00:29.600525 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:00:29 crc kubenswrapper[4774]: I1121 16:00:29.601169 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:00:52 crc kubenswrapper[4774]: I1121 16:00:52.781417 4774 scope.go:117] "RemoveContainer" containerID="7981b92e066d00b528d21b9f3f976c8aa87869ee7f050b30aeee71d3d6b3e8f9" Nov 21 16:00:59 crc kubenswrapper[4774]: I1121 16:00:59.600486 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
Nov 21 16:00:59 crc kubenswrapper[4774]: I1121 16:00:59.601271 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.158025 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29395681-6hjcc"]
Nov 21 16:01:00 crc kubenswrapper[4774]: E1121 16:01:00.158502 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc30023-280b-430c-a86d-3655a938905e" containerName="collect-profiles"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.158520 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc30023-280b-430c-a86d-3655a938905e" containerName="collect-profiles"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.158795 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="afc30023-280b-430c-a86d-3655a938905e" containerName="collect-profiles"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.159604 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.172676 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29395681-6hjcc"]
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.322867 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-combined-ca-bundle\") pod \"keystone-cron-29395681-6hjcc\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.323088 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45lc6\" (UniqueName: \"kubernetes.io/projected/575dd31f-98c7-44ff-9fcc-1c29de6d845f-kube-api-access-45lc6\") pod \"keystone-cron-29395681-6hjcc\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.323375 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-config-data\") pod \"keystone-cron-29395681-6hjcc\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.323808 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-fernet-keys\") pod \"keystone-cron-29395681-6hjcc\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.425897 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45lc6\" (UniqueName: \"kubernetes.io/projected/575dd31f-98c7-44ff-9fcc-1c29de6d845f-kube-api-access-45lc6\") pod \"keystone-cron-29395681-6hjcc\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.426023 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-config-data\") pod \"keystone-cron-29395681-6hjcc\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.426063 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-fernet-keys\") pod \"keystone-cron-29395681-6hjcc\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.426131 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-combined-ca-bundle\") pod \"keystone-cron-29395681-6hjcc\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.432722 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-combined-ca-bundle\") pod \"keystone-cron-29395681-6hjcc\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.432808 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-config-data\") pod \"keystone-cron-29395681-6hjcc\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.434385 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-fernet-keys\") pod \"keystone-cron-29395681-6hjcc\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.440974 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45lc6\" (UniqueName: \"kubernetes.io/projected/575dd31f-98c7-44ff-9fcc-1c29de6d845f-kube-api-access-45lc6\") pod \"keystone-cron-29395681-6hjcc\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.486934 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29395681-6hjcc"
Nov 21 16:01:00 crc kubenswrapper[4774]: I1121 16:01:00.948096 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29395681-6hjcc"]
Nov 21 16:01:01 crc kubenswrapper[4774]: I1121 16:01:01.562845 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29395681-6hjcc" event={"ID":"575dd31f-98c7-44ff-9fcc-1c29de6d845f","Type":"ContainerStarted","Data":"e80c6ec09635324250fecb2a0f3ce18c385f8a5f9e6ba3b37f9bd015f4af26e9"}
Nov 21 16:01:01 crc kubenswrapper[4774]: I1121 16:01:01.563615 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29395681-6hjcc" event={"ID":"575dd31f-98c7-44ff-9fcc-1c29de6d845f","Type":"ContainerStarted","Data":"d5facd5a2971faef1decdbd87c2a3d51d91ad1f6177bd7cf22b23fd3d2c77c08"}
Nov 21 16:01:01 crc kubenswrapper[4774]: I1121 16:01:01.582945 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29395681-6hjcc" podStartSLOduration=1.5829284750000001 podStartE2EDuration="1.582928475s" podCreationTimestamp="2025-11-21 16:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 16:01:01.580929818 +0000 UTC m=+7052.233129127" watchObservedRunningTime="2025-11-21 16:01:01.582928475 +0000 UTC m=+7052.235127734"
Nov 21 16:01:04 crc kubenswrapper[4774]: I1121 16:01:04.604762 4774 generic.go:334] "Generic (PLEG): container finished" podID="575dd31f-98c7-44ff-9fcc-1c29de6d845f" containerID="e80c6ec09635324250fecb2a0f3ce18c385f8a5f9e6ba3b37f9bd015f4af26e9" exitCode=0
Nov 21 16:01:04 crc kubenswrapper[4774]: I1121 16:01:04.604854 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29395681-6hjcc" event={"ID":"575dd31f-98c7-44ff-9fcc-1c29de6d845f","Type":"ContainerDied","Data":"e80c6ec09635324250fecb2a0f3ce18c385f8a5f9e6ba3b37f9bd015f4af26e9"}
Nov 21 16:01:05 crc kubenswrapper[4774]: I1121 16:01:05.974083 4774 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/keystone-cron-29395681-6hjcc" Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.056963 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-combined-ca-bundle\") pod \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.057064 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-config-data\") pod \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.057092 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45lc6\" (UniqueName: \"kubernetes.io/projected/575dd31f-98c7-44ff-9fcc-1c29de6d845f-kube-api-access-45lc6\") pod \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.057131 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-fernet-keys\") pod \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\" (UID: \"575dd31f-98c7-44ff-9fcc-1c29de6d845f\") " Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.062328 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/575dd31f-98c7-44ff-9fcc-1c29de6d845f-kube-api-access-45lc6" (OuterVolumeSpecName: "kube-api-access-45lc6") pod "575dd31f-98c7-44ff-9fcc-1c29de6d845f" (UID: "575dd31f-98c7-44ff-9fcc-1c29de6d845f"). InnerVolumeSpecName "kube-api-access-45lc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.062364 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "575dd31f-98c7-44ff-9fcc-1c29de6d845f" (UID: "575dd31f-98c7-44ff-9fcc-1c29de6d845f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.293129 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45lc6\" (UniqueName: \"kubernetes.io/projected/575dd31f-98c7-44ff-9fcc-1c29de6d845f-kube-api-access-45lc6\") on node \"crc\" DevicePath \"\"" Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.293866 4774 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.305758 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "575dd31f-98c7-44ff-9fcc-1c29de6d845f" (UID: "575dd31f-98c7-44ff-9fcc-1c29de6d845f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.356523 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-config-data" (OuterVolumeSpecName: "config-data") pod "575dd31f-98c7-44ff-9fcc-1c29de6d845f" (UID: "575dd31f-98c7-44ff-9fcc-1c29de6d845f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.398165 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.398206 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/575dd31f-98c7-44ff-9fcc-1c29de6d845f-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.626959 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29395681-6hjcc" event={"ID":"575dd31f-98c7-44ff-9fcc-1c29de6d845f","Type":"ContainerDied","Data":"d5facd5a2971faef1decdbd87c2a3d51d91ad1f6177bd7cf22b23fd3d2c77c08"} Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.627006 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5facd5a2971faef1decdbd87c2a3d51d91ad1f6177bd7cf22b23fd3d2c77c08" Nov 21 16:01:06 crc kubenswrapper[4774]: I1121 16:01:06.627098 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29395681-6hjcc" Nov 21 16:01:29 crc kubenswrapper[4774]: I1121 16:01:29.601072 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:01:29 crc kubenswrapper[4774]: I1121 16:01:29.601650 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:01:29 crc kubenswrapper[4774]: I1121 16:01:29.601703 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 16:01:29 crc kubenswrapper[4774]: I1121 16:01:29.602407 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3a2ff5bc3df476d6aa0bbcbb613be871d0c77839474cd4648a29a7332c51ade6"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 16:01:29 crc kubenswrapper[4774]: I1121 16:01:29.602478 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://3a2ff5bc3df476d6aa0bbcbb613be871d0c77839474cd4648a29a7332c51ade6" gracePeriod=600 Nov 21 16:01:30 crc kubenswrapper[4774]: 
I1121 16:01:30.871753 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="3a2ff5bc3df476d6aa0bbcbb613be871d0c77839474cd4648a29a7332c51ade6" exitCode=0 Nov 21 16:01:30 crc kubenswrapper[4774]: I1121 16:01:30.872100 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"3a2ff5bc3df476d6aa0bbcbb613be871d0c77839474cd4648a29a7332c51ade6"} Nov 21 16:01:30 crc kubenswrapper[4774]: I1121 16:01:30.872129 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4"} Nov 21 16:01:30 crc kubenswrapper[4774]: I1121 16:01:30.872146 4774 scope.go:117] "RemoveContainer" containerID="2c9f3c07f320fedc030beb61aa2689193cc9d9d8350ab06b7aa2c69c03596268" Nov 21 16:01:37 crc kubenswrapper[4774]: I1121 16:01:37.948138 4774 generic.go:334] "Generic (PLEG): container finished" podID="90a66815-e9c7-4b6e-869e-661af63e3e00" containerID="a2c96cf415fc18ecdde0f9a510fe68caef01ba9bfa36d55d84ab381e2282a71f" exitCode=0 Nov 21 16:01:37 crc kubenswrapper[4774]: I1121 16:01:37.948224 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" event={"ID":"90a66815-e9c7-4b6e-869e-661af63e3e00","Type":"ContainerDied","Data":"a2c96cf415fc18ecdde0f9a510fe68caef01ba9bfa36d55d84ab381e2282a71f"} Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.518062 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.621960 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbn6f\" (UniqueName: \"kubernetes.io/projected/90a66815-e9c7-4b6e-869e-661af63e3e00-kube-api-access-bbn6f\") pod \"90a66815-e9c7-4b6e-869e-661af63e3e00\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.622046 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-ceph\") pod \"90a66815-e9c7-4b6e-869e-661af63e3e00\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.622767 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-inventory\") pod \"90a66815-e9c7-4b6e-869e-661af63e3e00\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.623106 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-tripleo-cleanup-combined-ca-bundle\") pod \"90a66815-e9c7-4b6e-869e-661af63e3e00\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.623191 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-ssh-key\") pod \"90a66815-e9c7-4b6e-869e-661af63e3e00\" (UID: \"90a66815-e9c7-4b6e-869e-661af63e3e00\") " Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.628181 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-ceph" (OuterVolumeSpecName: "ceph") pod "90a66815-e9c7-4b6e-869e-661af63e3e00" (UID: "90a66815-e9c7-4b6e-869e-661af63e3e00"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.628811 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-tripleo-cleanup-combined-ca-bundle" (OuterVolumeSpecName: "tripleo-cleanup-combined-ca-bundle") pod "90a66815-e9c7-4b6e-869e-661af63e3e00" (UID: "90a66815-e9c7-4b6e-869e-661af63e3e00"). InnerVolumeSpecName "tripleo-cleanup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.628991 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90a66815-e9c7-4b6e-869e-661af63e3e00-kube-api-access-bbn6f" (OuterVolumeSpecName: "kube-api-access-bbn6f") pod "90a66815-e9c7-4b6e-869e-661af63e3e00" (UID: "90a66815-e9c7-4b6e-869e-661af63e3e00"). InnerVolumeSpecName "kube-api-access-bbn6f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.652349 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-inventory" (OuterVolumeSpecName: "inventory") pod "90a66815-e9c7-4b6e-869e-661af63e3e00" (UID: "90a66815-e9c7-4b6e-869e-661af63e3e00"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.659060 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "90a66815-e9c7-4b6e-869e-661af63e3e00" (UID: "90a66815-e9c7-4b6e-869e-661af63e3e00"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.726215 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.726257 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.726267 4774 reconciler_common.go:293] "Volume detached for volume \"tripleo-cleanup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-tripleo-cleanup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.726276 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/90a66815-e9c7-4b6e-869e-661af63e3e00-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.726287 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbn6f\" (UniqueName: \"kubernetes.io/projected/90a66815-e9c7-4b6e-869e-661af63e3e00-kube-api-access-bbn6f\") on node \"crc\" DevicePath \"\"" Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.971292 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" event={"ID":"90a66815-e9c7-4b6e-869e-661af63e3e00","Type":"ContainerDied","Data":"abb08f798ebb511da54f1a3a0dbb09c829c7052b10e9b1ac2be2b34bdb9280cc"} Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.971644 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="abb08f798ebb511da54f1a3a0dbb09c829c7052b10e9b1ac2be2b34bdb9280cc" Nov 21 16:01:39 crc kubenswrapper[4774]: I1121 16:01:39.971359 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.250229 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-xh5wz"] Nov 21 16:01:42 crc kubenswrapper[4774]: E1121 16:01:42.251103 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="575dd31f-98c7-44ff-9fcc-1c29de6d845f" containerName="keystone-cron" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.251123 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="575dd31f-98c7-44ff-9fcc-1c29de6d845f" containerName="keystone-cron" Nov 21 16:01:42 crc kubenswrapper[4774]: E1121 16:01:42.251154 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90a66815-e9c7-4b6e-869e-661af63e3e00" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.251162 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="90a66815-e9c7-4b6e-869e-661af63e3e00" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.251357 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="90a66815-e9c7-4b6e-869e-661af63e3e00" containerName="tripleo-cleanup-tripleo-cleanup-openstack-cell1" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.251384 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="575dd31f-98c7-44ff-9fcc-1c29de6d845f" containerName="keystone-cron" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.252130 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.258257 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.258373 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.258427 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.258481 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.263861 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-xh5wz"] Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.291614 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-inventory\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.291661 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: 
I1121 16:01:42.291909 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7d778\" (UniqueName: \"kubernetes.io/projected/8643272b-86b9-496c-826e-148e3d20ce71-kube-api-access-7d778\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.292075 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-ceph\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.292216 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.394062 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7d778\" (UniqueName: \"kubernetes.io/projected/8643272b-86b9-496c-826e-148e3d20ce71-kube-api-access-7d778\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.394186 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-ceph\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.394378 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.395776 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-inventory\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.395859 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.403494 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-inventory\") pod 
\"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.404034 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-ceph\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.404326 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-ssh-key\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.404650 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-bootstrap-combined-ca-bundle\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.417494 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7d778\" (UniqueName: \"kubernetes.io/projected/8643272b-86b9-496c-826e-148e3d20ce71-kube-api-access-7d778\") pod \"bootstrap-openstack-openstack-cell1-xh5wz\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:42 crc kubenswrapper[4774]: I1121 16:01:42.581388 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:01:43 crc kubenswrapper[4774]: I1121 16:01:43.162999 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-openstack-openstack-cell1-xh5wz"] Nov 21 16:01:43 crc kubenswrapper[4774]: I1121 16:01:43.164062 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 16:01:44 crc kubenswrapper[4774]: I1121 16:01:44.020641 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" event={"ID":"8643272b-86b9-496c-826e-148e3d20ce71","Type":"ContainerStarted","Data":"05f3a7b8aac11fc3470928e667be60b879dd8a60da920e72fc50c69f4ae118a7"} Nov 21 16:01:45 crc kubenswrapper[4774]: I1121 16:01:45.035745 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" event={"ID":"8643272b-86b9-496c-826e-148e3d20ce71","Type":"ContainerStarted","Data":"0a339bae9b5fa8d1c359d9f3d0de665f9ce6f192523c6b90904a72c9ed6d7011"} Nov 21 16:01:45 crc kubenswrapper[4774]: I1121 16:01:45.056253 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" podStartSLOduration=1.845960708 podStartE2EDuration="3.056228848s" podCreationTimestamp="2025-11-21 16:01:42 +0000 UTC" firstStartedPulling="2025-11-21 16:01:43.163860625 +0000 UTC m=+7093.816059884" lastFinishedPulling="2025-11-21 16:01:44.374128765 +0000 UTC m=+7095.026328024" observedRunningTime="2025-11-21 16:01:45.052971595 +0000 UTC m=+7095.705170884" watchObservedRunningTime="2025-11-21 16:01:45.056228848 +0000 UTC m=+7095.708428107" Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.223947 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tdq79"] Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.227920 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.237048 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tdq79"] Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.312054 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7f2db5-6657-40fd-94a3-1d7ee6678338-utilities\") pod \"redhat-operators-tdq79\" (UID: \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\") " pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.312607 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7f2db5-6657-40fd-94a3-1d7ee6678338-catalog-content\") pod \"redhat-operators-tdq79\" (UID: \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\") " pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.312772 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzkz7\" (UniqueName: \"kubernetes.io/projected/5b7f2db5-6657-40fd-94a3-1d7ee6678338-kube-api-access-pzkz7\") pod \"redhat-operators-tdq79\" (UID: \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\") " pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.415727 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzkz7\" (UniqueName: \"kubernetes.io/projected/5b7f2db5-6657-40fd-94a3-1d7ee6678338-kube-api-access-pzkz7\") pod \"redhat-operators-tdq79\" (UID: \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\") " pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.415849 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7f2db5-6657-40fd-94a3-1d7ee6678338-utilities\") pod \"redhat-operators-tdq79\" (UID: \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\") " pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.415971 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7f2db5-6657-40fd-94a3-1d7ee6678338-catalog-content\") pod \"redhat-operators-tdq79\" (UID: \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\") " pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.416353 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7f2db5-6657-40fd-94a3-1d7ee6678338-utilities\") pod \"redhat-operators-tdq79\" (UID: \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\") " pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.416379 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7f2db5-6657-40fd-94a3-1d7ee6678338-catalog-content\") pod \"redhat-operators-tdq79\" (UID: \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\") " pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.433794 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-pzkz7\" (UniqueName: \"kubernetes.io/projected/5b7f2db5-6657-40fd-94a3-1d7ee6678338-kube-api-access-pzkz7\") pod \"redhat-operators-tdq79\" (UID: \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\") " pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:02:53 crc kubenswrapper[4774]: I1121 16:02:53.563233 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:02:54 crc kubenswrapper[4774]: I1121 16:02:54.080650 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tdq79"] Nov 21 16:02:54 crc kubenswrapper[4774]: I1121 16:02:54.799582 4774 generic.go:334] "Generic (PLEG): container finished" podID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" containerID="a3f829d81f4b92b4ace3a0c834dc7c524c5f836f55e5d689c00ac975b45b74d8" exitCode=0 Nov 21 16:02:54 crc kubenswrapper[4774]: I1121 16:02:54.799630 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tdq79" event={"ID":"5b7f2db5-6657-40fd-94a3-1d7ee6678338","Type":"ContainerDied","Data":"a3f829d81f4b92b4ace3a0c834dc7c524c5f836f55e5d689c00ac975b45b74d8"} Nov 21 16:02:54 crc kubenswrapper[4774]: I1121 16:02:54.799891 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tdq79" event={"ID":"5b7f2db5-6657-40fd-94a3-1d7ee6678338","Type":"ContainerStarted","Data":"f7d5295231273049d95e78ec19493c95c7b37fef709326e5243c4f15e7c45d7f"} Nov 21 16:02:55 crc kubenswrapper[4774]: I1121 16:02:55.812846 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tdq79" event={"ID":"5b7f2db5-6657-40fd-94a3-1d7ee6678338","Type":"ContainerStarted","Data":"d0206f188ab787748c4d3c514cb030cb5bb8387ca0c1171e222c35903970da64"} Nov 21 16:03:01 crc kubenswrapper[4774]: I1121 16:03:01.879541 4774 generic.go:334] "Generic (PLEG): container finished" podID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" containerID="d0206f188ab787748c4d3c514cb030cb5bb8387ca0c1171e222c35903970da64" exitCode=0 Nov 21 16:03:01 crc kubenswrapper[4774]: I1121 16:03:01.879616 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tdq79" event={"ID":"5b7f2db5-6657-40fd-94a3-1d7ee6678338","Type":"ContainerDied","Data":"d0206f188ab787748c4d3c514cb030cb5bb8387ca0c1171e222c35903970da64"} Nov 21 16:03:02 crc kubenswrapper[4774]: I1121 16:03:02.890639 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tdq79" event={"ID":"5b7f2db5-6657-40fd-94a3-1d7ee6678338","Type":"ContainerStarted","Data":"bfc7907641e2d4c319156405a0e666c67f588346fbdb4f2e08cdea632b64f49b"} Nov 21 16:03:02 crc kubenswrapper[4774]: I1121 16:03:02.912406 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tdq79" podStartSLOduration=2.447951482 podStartE2EDuration="9.912386335s" podCreationTimestamp="2025-11-21 16:02:53 +0000 UTC" firstStartedPulling="2025-11-21 16:02:54.802101655 +0000 UTC m=+7165.454300914" lastFinishedPulling="2025-11-21 16:03:02.266536508 +0000 UTC m=+7172.918735767" observedRunningTime="2025-11-21 16:03:02.908901635 +0000 UTC m=+7173.561100904" watchObservedRunningTime="2025-11-21 16:03:02.912386335 +0000 UTC m=+7173.564585594" Nov 21 16:03:03 crc kubenswrapper[4774]: I1121 16:03:03.563378 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 
16:03:03 crc kubenswrapper[4774]: I1121 16:03:03.563619 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:03:04 crc kubenswrapper[4774]: I1121 16:03:04.618729 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tdq79" podUID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" containerName="registry-server" probeResult="failure" output=< Nov 21 16:03:04 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 16:03:04 crc kubenswrapper[4774]: > Nov 21 16:03:13 crc kubenswrapper[4774]: I1121 16:03:13.623588 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:03:13 crc kubenswrapper[4774]: I1121 16:03:13.672691 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:03:13 crc kubenswrapper[4774]: I1121 16:03:13.859256 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tdq79"] Nov 21 16:03:15 crc kubenswrapper[4774]: I1121 16:03:15.002948 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tdq79" podUID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" containerName="registry-server" containerID="cri-o://bfc7907641e2d4c319156405a0e666c67f588346fbdb4f2e08cdea632b64f49b" gracePeriod=2 Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.027370 4774 generic.go:334] "Generic (PLEG): container finished" podID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" containerID="bfc7907641e2d4c319156405a0e666c67f588346fbdb4f2e08cdea632b64f49b" exitCode=0 Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.027433 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tdq79" event={"ID":"5b7f2db5-6657-40fd-94a3-1d7ee6678338","Type":"ContainerDied","Data":"bfc7907641e2d4c319156405a0e666c67f588346fbdb4f2e08cdea632b64f49b"} Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.027934 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tdq79" event={"ID":"5b7f2db5-6657-40fd-94a3-1d7ee6678338","Type":"ContainerDied","Data":"f7d5295231273049d95e78ec19493c95c7b37fef709326e5243c4f15e7c45d7f"} Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.027949 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7d5295231273049d95e78ec19493c95c7b37fef709326e5243c4f15e7c45d7f" Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.046046 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.221444 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7f2db5-6657-40fd-94a3-1d7ee6678338-utilities\") pod \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\" (UID: \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\") " Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.221588 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzkz7\" (UniqueName: \"kubernetes.io/projected/5b7f2db5-6657-40fd-94a3-1d7ee6678338-kube-api-access-pzkz7\") pod \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\" (UID: \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\") " Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.221854 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7f2db5-6657-40fd-94a3-1d7ee6678338-catalog-content\") pod \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\" (UID: \"5b7f2db5-6657-40fd-94a3-1d7ee6678338\") " Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.224424 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b7f2db5-6657-40fd-94a3-1d7ee6678338-utilities" (OuterVolumeSpecName: "utilities") pod "5b7f2db5-6657-40fd-94a3-1d7ee6678338" (UID: "5b7f2db5-6657-40fd-94a3-1d7ee6678338"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.229266 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b7f2db5-6657-40fd-94a3-1d7ee6678338-kube-api-access-pzkz7" (OuterVolumeSpecName: "kube-api-access-pzkz7") pod "5b7f2db5-6657-40fd-94a3-1d7ee6678338" (UID: "5b7f2db5-6657-40fd-94a3-1d7ee6678338"). InnerVolumeSpecName "kube-api-access-pzkz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.304753 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b7f2db5-6657-40fd-94a3-1d7ee6678338-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5b7f2db5-6657-40fd-94a3-1d7ee6678338" (UID: "5b7f2db5-6657-40fd-94a3-1d7ee6678338"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.325290 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7f2db5-6657-40fd-94a3-1d7ee6678338-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.325320 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzkz7\" (UniqueName: \"kubernetes.io/projected/5b7f2db5-6657-40fd-94a3-1d7ee6678338-kube-api-access-pzkz7\") on node \"crc\" DevicePath \"\"" Nov 21 16:03:16 crc kubenswrapper[4774]: I1121 16:03:16.325329 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7f2db5-6657-40fd-94a3-1d7ee6678338-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:03:17 crc kubenswrapper[4774]: I1121 16:03:17.039260 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tdq79" Nov 21 16:03:17 crc kubenswrapper[4774]: I1121 16:03:17.073583 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tdq79"] Nov 21 16:03:17 crc kubenswrapper[4774]: I1121 16:03:17.089141 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tdq79"] Nov 21 16:03:18 crc kubenswrapper[4774]: I1121 16:03:18.106945 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" path="/var/lib/kubelet/pods/5b7f2db5-6657-40fd-94a3-1d7ee6678338/volumes" Nov 21 16:03:59 crc kubenswrapper[4774]: I1121 16:03:59.601259 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:03:59 crc kubenswrapper[4774]: I1121 16:03:59.601798 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:04:29 crc kubenswrapper[4774]: I1121 16:04:29.600603 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:04:29 crc kubenswrapper[4774]: I1121 16:04:29.601154 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:04:53 crc kubenswrapper[4774]: I1121 16:04:53.017776 4774 generic.go:334] "Generic (PLEG): container finished" podID="8643272b-86b9-496c-826e-148e3d20ce71" containerID="0a339bae9b5fa8d1c359d9f3d0de665f9ce6f192523c6b90904a72c9ed6d7011" exitCode=0 Nov 21 16:04:53 crc kubenswrapper[4774]: I1121 16:04:53.017903 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" event={"ID":"8643272b-86b9-496c-826e-148e3d20ce71","Type":"ContainerDied","Data":"0a339bae9b5fa8d1c359d9f3d0de665f9ce6f192523c6b90904a72c9ed6d7011"} Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.511660 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.590574 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7d778\" (UniqueName: \"kubernetes.io/projected/8643272b-86b9-496c-826e-148e3d20ce71-kube-api-access-7d778\") pod \"8643272b-86b9-496c-826e-148e3d20ce71\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.590770 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-inventory\") pod \"8643272b-86b9-496c-826e-148e3d20ce71\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.590877 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-ssh-key\") pod \"8643272b-86b9-496c-826e-148e3d20ce71\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.590949 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-bootstrap-combined-ca-bundle\") pod \"8643272b-86b9-496c-826e-148e3d20ce71\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.590982 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-ceph\") pod \"8643272b-86b9-496c-826e-148e3d20ce71\" (UID: \"8643272b-86b9-496c-826e-148e3d20ce71\") " Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.596578 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "8643272b-86b9-496c-826e-148e3d20ce71" (UID: "8643272b-86b9-496c-826e-148e3d20ce71"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.597872 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8643272b-86b9-496c-826e-148e3d20ce71-kube-api-access-7d778" (OuterVolumeSpecName: "kube-api-access-7d778") pod "8643272b-86b9-496c-826e-148e3d20ce71" (UID: "8643272b-86b9-496c-826e-148e3d20ce71"). InnerVolumeSpecName "kube-api-access-7d778". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.598385 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-ceph" (OuterVolumeSpecName: "ceph") pod "8643272b-86b9-496c-826e-148e3d20ce71" (UID: "8643272b-86b9-496c-826e-148e3d20ce71"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.622766 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8643272b-86b9-496c-826e-148e3d20ce71" (UID: "8643272b-86b9-496c-826e-148e3d20ce71"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.629804 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-inventory" (OuterVolumeSpecName: "inventory") pod "8643272b-86b9-496c-826e-148e3d20ce71" (UID: "8643272b-86b9-496c-826e-148e3d20ce71"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.695053 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.695106 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.695115 4774 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.695127 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8643272b-86b9-496c-826e-148e3d20ce71-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:04:54 crc kubenswrapper[4774]: I1121 16:04:54.695136 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7d778\" (UniqueName: \"kubernetes.io/projected/8643272b-86b9-496c-826e-148e3d20ce71-kube-api-access-7d778\") on node \"crc\" DevicePath \"\"" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.041843 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" event={"ID":"8643272b-86b9-496c-826e-148e3d20ce71","Type":"ContainerDied","Data":"05f3a7b8aac11fc3470928e667be60b879dd8a60da920e72fc50c69f4ae118a7"} Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.041912 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05f3a7b8aac11fc3470928e667be60b879dd8a60da920e72fc50c69f4ae118a7" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.041924 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-openstack-openstack-cell1-xh5wz" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.133508 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-jd5xr"] Nov 21 16:04:55 crc kubenswrapper[4774]: E1121 16:04:55.134154 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" containerName="registry-server" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.134188 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" containerName="registry-server" Nov 21 16:04:55 crc kubenswrapper[4774]: E1121 16:04:55.134222 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" containerName="extract-utilities" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.134231 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" containerName="extract-utilities" Nov 21 16:04:55 crc kubenswrapper[4774]: E1121 16:04:55.134263 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" containerName="extract-content" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.134271 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" containerName="extract-content" Nov 21 16:04:55 crc kubenswrapper[4774]: E1121 16:04:55.134297 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8643272b-86b9-496c-826e-148e3d20ce71" containerName="bootstrap-openstack-openstack-cell1" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.134306 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8643272b-86b9-496c-826e-148e3d20ce71" containerName="bootstrap-openstack-openstack-cell1" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.134567 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b7f2db5-6657-40fd-94a3-1d7ee6678338" containerName="registry-server" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.134602 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8643272b-86b9-496c-826e-148e3d20ce71" containerName="bootstrap-openstack-openstack-cell1" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.135644 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.137668 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.137779 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.138083 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.139706 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.144859 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-jd5xr"] Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.207492 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-ssh-key\") pod \"download-cache-openstack-openstack-cell1-jd5xr\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.207925 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75v5w\" (UniqueName: \"kubernetes.io/projected/467f535e-8c0e-43b0-b241-a85801fcb00a-kube-api-access-75v5w\") pod \"download-cache-openstack-openstack-cell1-jd5xr\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.208018 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-ceph\") pod \"download-cache-openstack-openstack-cell1-jd5xr\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.208116 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-inventory\") pod \"download-cache-openstack-openstack-cell1-jd5xr\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.310131 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-ceph\") pod \"download-cache-openstack-openstack-cell1-jd5xr\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.310304 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-inventory\") pod \"download-cache-openstack-openstack-cell1-jd5xr\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc 
kubenswrapper[4774]: I1121 16:04:55.310435 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-ssh-key\") pod \"download-cache-openstack-openstack-cell1-jd5xr\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.310495 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75v5w\" (UniqueName: \"kubernetes.io/projected/467f535e-8c0e-43b0-b241-a85801fcb00a-kube-api-access-75v5w\") pod \"download-cache-openstack-openstack-cell1-jd5xr\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.315720 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-ssh-key\") pod \"download-cache-openstack-openstack-cell1-jd5xr\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.316710 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-inventory\") pod \"download-cache-openstack-openstack-cell1-jd5xr\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.327174 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75v5w\" (UniqueName: \"kubernetes.io/projected/467f535e-8c0e-43b0-b241-a85801fcb00a-kube-api-access-75v5w\") pod \"download-cache-openstack-openstack-cell1-jd5xr\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.327366 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-ceph\") pod \"download-cache-openstack-openstack-cell1-jd5xr\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.454876 4774 util.go:30] "No sandbox for pod can be found. 
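
The VerifyControllerAttachedVolume -> MountVolume -> "MountVolume.SetUp succeeded" progression above is driven entirely by the pod's volume spec; the "ssh-key", "inventory", and "ceph" volumes are secret volumes, and "kube-api-access-75v5w" is the projected service-account-token volume kubelet injects automatically. A minimal sketch of how such a secret volume is declared, using the k8s.io/api module; the volume name comes from the log, the secret name is a placeholder:

package main

import (
	"encoding/json"
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// Volume name "ssh-key" is taken from the log above; the backing
	// secret name here is a hypothetical placeholder, not from the log.
	vol := corev1.Volume{
		Name: "ssh-key",
		VolumeSource: corev1.VolumeSource{
			Secret: &corev1.SecretVolumeSource{
				SecretName: "example-ssh-key-secret", // placeholder
			},
		},
	}
	// Print the spec as kubelet would receive it from the API server.
	out, _ := json.MarshalIndent(vol, "", "  ")
	fmt.Println(string(out))
}

Each such volume produces exactly one attach-verify, one mount-start, and one set-up-succeeded record per pod sync, which is the pattern repeated for all four volumes above.
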
Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:04:55 crc kubenswrapper[4774]: W1121 16:04:55.991342 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod467f535e_8c0e_43b0_b241_a85801fcb00a.slice/crio-13a9fa530aa359660af6ce3da472ff23ec744a053d4744250a1aaf19db4b180f WatchSource:0}: Error finding container 13a9fa530aa359660af6ce3da472ff23ec744a053d4744250a1aaf19db4b180f: Status 404 returned error can't find the container with id 13a9fa530aa359660af6ce3da472ff23ec744a053d4744250a1aaf19db4b180f Nov 21 16:04:55 crc kubenswrapper[4774]: I1121 16:04:55.992968 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-openstack-openstack-cell1-jd5xr"] Nov 21 16:04:56 crc kubenswrapper[4774]: I1121 16:04:56.059991 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" event={"ID":"467f535e-8c0e-43b0-b241-a85801fcb00a","Type":"ContainerStarted","Data":"13a9fa530aa359660af6ce3da472ff23ec744a053d4744250a1aaf19db4b180f"} Nov 21 16:04:57 crc kubenswrapper[4774]: I1121 16:04:57.071167 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" event={"ID":"467f535e-8c0e-43b0-b241-a85801fcb00a","Type":"ContainerStarted","Data":"c6509de1d8cf29f3f14826143c59639947223e2f224a3d507a425abac06b82a8"} Nov 21 16:04:57 crc kubenswrapper[4774]: I1121 16:04:57.097683 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" podStartSLOduration=1.640707997 podStartE2EDuration="2.097667583s" podCreationTimestamp="2025-11-21 16:04:55 +0000 UTC" firstStartedPulling="2025-11-21 16:04:55.993623054 +0000 UTC m=+7286.645822313" lastFinishedPulling="2025-11-21 16:04:56.45058263 +0000 UTC m=+7287.102781899" observedRunningTime="2025-11-21 16:04:57.086434072 +0000 UTC m=+7287.738633331" watchObservedRunningTime="2025-11-21 16:04:57.097667583 +0000 UTC m=+7287.749866842" Nov 21 16:04:59 crc kubenswrapper[4774]: I1121 16:04:59.600204 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:04:59 crc kubenswrapper[4774]: I1121 16:04:59.600587 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:04:59 crc kubenswrapper[4774]: I1121 16:04:59.600629 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 16:04:59 crc kubenswrapper[4774]: I1121 16:04:59.601398 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 16:04:59 crc kubenswrapper[4774]: I1121 
16:04:59.601443 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" gracePeriod=600 Nov 21 16:04:59 crc kubenswrapper[4774]: E1121 16:04:59.732776 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:05:00 crc kubenswrapper[4774]: I1121 16:05:00.111478 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" exitCode=0 Nov 21 16:05:00 crc kubenswrapper[4774]: I1121 16:05:00.118259 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4"} Nov 21 16:05:00 crc kubenswrapper[4774]: I1121 16:05:00.118326 4774 scope.go:117] "RemoveContainer" containerID="3a2ff5bc3df476d6aa0bbcbb613be871d0c77839474cd4648a29a7332c51ade6" Nov 21 16:05:00 crc kubenswrapper[4774]: I1121 16:05:00.119181 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:05:00 crc kubenswrapper[4774]: E1121 16:05:00.119790 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:05:13 crc kubenswrapper[4774]: I1121 16:05:13.092968 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:05:13 crc kubenswrapper[4774]: E1121 16:05:13.093708 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:05:24 crc kubenswrapper[4774]: I1121 16:05:24.094062 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:05:24 crc kubenswrapper[4774]: E1121 16:05:24.094648 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:05:37 crc kubenswrapper[4774]: I1121 16:05:37.093368 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:05:37 crc kubenswrapper[4774]: E1121 16:05:37.094156 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:05:51 crc kubenswrapper[4774]: I1121 16:05:51.093211 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:05:51 crc kubenswrapper[4774]: E1121 16:05:51.094104 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.093595 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:06:04 crc kubenswrapper[4774]: E1121 16:06:04.095664 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.138799 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pqh7b"] Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.141711 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.206411 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqh7b"] Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.258761 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a197aa5-a232-45de-8daf-5a8579f3a7d1-catalog-content\") pod \"redhat-marketplace-pqh7b\" (UID: \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\") " pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.259033 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a197aa5-a232-45de-8daf-5a8579f3a7d1-utilities\") pod \"redhat-marketplace-pqh7b\" (UID: \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\") " pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.259481 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhktk\" (UniqueName: \"kubernetes.io/projected/2a197aa5-a232-45de-8daf-5a8579f3a7d1-kube-api-access-zhktk\") pod \"redhat-marketplace-pqh7b\" (UID: \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\") " pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.364487 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhktk\" (UniqueName: \"kubernetes.io/projected/2a197aa5-a232-45de-8daf-5a8579f3a7d1-kube-api-access-zhktk\") pod \"redhat-marketplace-pqh7b\" (UID: \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\") " pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.364627 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a197aa5-a232-45de-8daf-5a8579f3a7d1-catalog-content\") pod \"redhat-marketplace-pqh7b\" (UID: \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\") " pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.364668 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a197aa5-a232-45de-8daf-5a8579f3a7d1-utilities\") pod \"redhat-marketplace-pqh7b\" (UID: \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\") " pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.365188 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a197aa5-a232-45de-8daf-5a8579f3a7d1-utilities\") pod \"redhat-marketplace-pqh7b\" (UID: \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\") " pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.365334 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a197aa5-a232-45de-8daf-5a8579f3a7d1-catalog-content\") pod \"redhat-marketplace-pqh7b\" (UID: \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\") " pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.384885 4774 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zhktk\" (UniqueName: \"kubernetes.io/projected/2a197aa5-a232-45de-8daf-5a8579f3a7d1-kube-api-access-zhktk\") pod \"redhat-marketplace-pqh7b\" (UID: \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\") " pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:04 crc kubenswrapper[4774]: I1121 16:06:04.480393 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:05 crc kubenswrapper[4774]: I1121 16:06:05.263115 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqh7b"] Nov 21 16:06:05 crc kubenswrapper[4774]: I1121 16:06:05.818148 4774 generic.go:334] "Generic (PLEG): container finished" podID="2a197aa5-a232-45de-8daf-5a8579f3a7d1" containerID="2b55c45068ce80d8540d03564e26b7550afd7af6c441fe604989ce62523f938f" exitCode=0 Nov 21 16:06:05 crc kubenswrapper[4774]: I1121 16:06:05.818263 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqh7b" event={"ID":"2a197aa5-a232-45de-8daf-5a8579f3a7d1","Type":"ContainerDied","Data":"2b55c45068ce80d8540d03564e26b7550afd7af6c441fe604989ce62523f938f"} Nov 21 16:06:05 crc kubenswrapper[4774]: I1121 16:06:05.818683 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqh7b" event={"ID":"2a197aa5-a232-45de-8daf-5a8579f3a7d1","Type":"ContainerStarted","Data":"dcbe93b42b3943db77b6a15daea1d6ab6e750e12aecdc16d7f5a87476540ba43"} Nov 21 16:06:06 crc kubenswrapper[4774]: I1121 16:06:06.829264 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqh7b" event={"ID":"2a197aa5-a232-45de-8daf-5a8579f3a7d1","Type":"ContainerStarted","Data":"d74c0ac9ad672151a921b426e7aff7d2eda9a9513e94a0bfb7b48c786aefd656"} Nov 21 16:06:07 crc kubenswrapper[4774]: I1121 16:06:07.840199 4774 generic.go:334] "Generic (PLEG): container finished" podID="2a197aa5-a232-45de-8daf-5a8579f3a7d1" containerID="d74c0ac9ad672151a921b426e7aff7d2eda9a9513e94a0bfb7b48c786aefd656" exitCode=0 Nov 21 16:06:07 crc kubenswrapper[4774]: I1121 16:06:07.840256 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqh7b" event={"ID":"2a197aa5-a232-45de-8daf-5a8579f3a7d1","Type":"ContainerDied","Data":"d74c0ac9ad672151a921b426e7aff7d2eda9a9513e94a0bfb7b48c786aefd656"} Nov 21 16:06:08 crc kubenswrapper[4774]: I1121 16:06:08.886369 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqh7b" event={"ID":"2a197aa5-a232-45de-8daf-5a8579f3a7d1","Type":"ContainerStarted","Data":"d586b9b9f3c65d98b60b23b2af2508ea3ded9d7e7b8dd80c2cfb90acbeff9101"} Nov 21 16:06:08 crc kubenswrapper[4774]: I1121 16:06:08.914333 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pqh7b" podStartSLOduration=2.464792572 podStartE2EDuration="4.914317631s" podCreationTimestamp="2025-11-21 16:06:04 +0000 UTC" firstStartedPulling="2025-11-21 16:06:05.820364026 +0000 UTC m=+7356.472563295" lastFinishedPulling="2025-11-21 16:06:08.269889095 +0000 UTC m=+7358.922088354" observedRunningTime="2025-11-21 16:06:08.912275533 +0000 UTC m=+7359.564474792" watchObservedRunningTime="2025-11-21 16:06:08.914317631 +0000 UTC m=+7359.566516890" Nov 21 16:06:14 crc kubenswrapper[4774]: I1121 16:06:14.481210 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:14 crc kubenswrapper[4774]: I1121 16:06:14.484581 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:14 crc kubenswrapper[4774]: I1121 16:06:14.543163 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:14 crc kubenswrapper[4774]: I1121 16:06:14.982370 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:15 crc kubenswrapper[4774]: I1121 16:06:15.030921 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqh7b"] Nov 21 16:06:16 crc kubenswrapper[4774]: I1121 16:06:16.958605 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pqh7b" podUID="2a197aa5-a232-45de-8daf-5a8579f3a7d1" containerName="registry-server" containerID="cri-o://d586b9b9f3c65d98b60b23b2af2508ea3ded9d7e7b8dd80c2cfb90acbeff9101" gracePeriod=2 Nov 21 16:06:17 crc kubenswrapper[4774]: I1121 16:06:17.973723 4774 generic.go:334] "Generic (PLEG): container finished" podID="2a197aa5-a232-45de-8daf-5a8579f3a7d1" containerID="d586b9b9f3c65d98b60b23b2af2508ea3ded9d7e7b8dd80c2cfb90acbeff9101" exitCode=0 Nov 21 16:06:17 crc kubenswrapper[4774]: I1121 16:06:17.974023 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqh7b" event={"ID":"2a197aa5-a232-45de-8daf-5a8579f3a7d1","Type":"ContainerDied","Data":"d586b9b9f3c65d98b60b23b2af2508ea3ded9d7e7b8dd80c2cfb90acbeff9101"} Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.093176 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:06:18 crc kubenswrapper[4774]: E1121 16:06:18.093438 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.187963 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.269985 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a197aa5-a232-45de-8daf-5a8579f3a7d1-utilities\") pod \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\" (UID: \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\") " Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.270440 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhktk\" (UniqueName: \"kubernetes.io/projected/2a197aa5-a232-45de-8daf-5a8579f3a7d1-kube-api-access-zhktk\") pod \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\" (UID: \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\") " Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.270654 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a197aa5-a232-45de-8daf-5a8579f3a7d1-catalog-content\") pod \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\" (UID: \"2a197aa5-a232-45de-8daf-5a8579f3a7d1\") " Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.271277 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a197aa5-a232-45de-8daf-5a8579f3a7d1-utilities" (OuterVolumeSpecName: "utilities") pod "2a197aa5-a232-45de-8daf-5a8579f3a7d1" (UID: "2a197aa5-a232-45de-8daf-5a8579f3a7d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.276871 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a197aa5-a232-45de-8daf-5a8579f3a7d1-kube-api-access-zhktk" (OuterVolumeSpecName: "kube-api-access-zhktk") pod "2a197aa5-a232-45de-8daf-5a8579f3a7d1" (UID: "2a197aa5-a232-45de-8daf-5a8579f3a7d1"). InnerVolumeSpecName "kube-api-access-zhktk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.289998 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a197aa5-a232-45de-8daf-5a8579f3a7d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a197aa5-a232-45de-8daf-5a8579f3a7d1" (UID: "2a197aa5-a232-45de-8daf-5a8579f3a7d1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.375034 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhktk\" (UniqueName: \"kubernetes.io/projected/2a197aa5-a232-45de-8daf-5a8579f3a7d1-kube-api-access-zhktk\") on node \"crc\" DevicePath \"\"" Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.375678 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a197aa5-a232-45de-8daf-5a8579f3a7d1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.375714 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a197aa5-a232-45de-8daf-5a8579f3a7d1-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.984699 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqh7b" event={"ID":"2a197aa5-a232-45de-8daf-5a8579f3a7d1","Type":"ContainerDied","Data":"dcbe93b42b3943db77b6a15daea1d6ab6e750e12aecdc16d7f5a87476540ba43"} Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.984754 4774 scope.go:117] "RemoveContainer" containerID="d586b9b9f3c65d98b60b23b2af2508ea3ded9d7e7b8dd80c2cfb90acbeff9101" Nov 21 16:06:18 crc kubenswrapper[4774]: I1121 16:06:18.985654 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqh7b" Nov 21 16:06:19 crc kubenswrapper[4774]: I1121 16:06:19.012909 4774 scope.go:117] "RemoveContainer" containerID="d74c0ac9ad672151a921b426e7aff7d2eda9a9513e94a0bfb7b48c786aefd656" Nov 21 16:06:19 crc kubenswrapper[4774]: I1121 16:06:19.022594 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqh7b"] Nov 21 16:06:19 crc kubenswrapper[4774]: I1121 16:06:19.032264 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqh7b"] Nov 21 16:06:19 crc kubenswrapper[4774]: I1121 16:06:19.058726 4774 scope.go:117] "RemoveContainer" containerID="2b55c45068ce80d8540d03564e26b7550afd7af6c441fe604989ce62523f938f" Nov 21 16:06:20 crc kubenswrapper[4774]: I1121 16:06:20.105537 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a197aa5-a232-45de-8daf-5a8579f3a7d1" path="/var/lib/kubelet/pods/2a197aa5-a232-45de-8daf-5a8579f3a7d1/volumes" Nov 21 16:06:32 crc kubenswrapper[4774]: I1121 16:06:32.093991 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:06:32 crc kubenswrapper[4774]: E1121 16:06:32.095057 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:06:38 crc kubenswrapper[4774]: I1121 16:06:38.163225 4774 generic.go:334] "Generic (PLEG): container finished" podID="467f535e-8c0e-43b0-b241-a85801fcb00a" containerID="c6509de1d8cf29f3f14826143c59639947223e2f224a3d507a425abac06b82a8" exitCode=0 Nov 21 16:06:38 crc kubenswrapper[4774]: I1121 16:06:38.163339 4774 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" event={"ID":"467f535e-8c0e-43b0-b241-a85801fcb00a","Type":"ContainerDied","Data":"c6509de1d8cf29f3f14826143c59639947223e2f224a3d507a425abac06b82a8"} Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.640999 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.764682 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-ssh-key\") pod \"467f535e-8c0e-43b0-b241-a85801fcb00a\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.764779 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-inventory\") pod \"467f535e-8c0e-43b0-b241-a85801fcb00a\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.764889 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75v5w\" (UniqueName: \"kubernetes.io/projected/467f535e-8c0e-43b0-b241-a85801fcb00a-kube-api-access-75v5w\") pod \"467f535e-8c0e-43b0-b241-a85801fcb00a\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.765090 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-ceph\") pod \"467f535e-8c0e-43b0-b241-a85801fcb00a\" (UID: \"467f535e-8c0e-43b0-b241-a85801fcb00a\") " Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.770871 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/467f535e-8c0e-43b0-b241-a85801fcb00a-kube-api-access-75v5w" (OuterVolumeSpecName: "kube-api-access-75v5w") pod "467f535e-8c0e-43b0-b241-a85801fcb00a" (UID: "467f535e-8c0e-43b0-b241-a85801fcb00a"). InnerVolumeSpecName "kube-api-access-75v5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.770986 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-ceph" (OuterVolumeSpecName: "ceph") pod "467f535e-8c0e-43b0-b241-a85801fcb00a" (UID: "467f535e-8c0e-43b0-b241-a85801fcb00a"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.800886 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "467f535e-8c0e-43b0-b241-a85801fcb00a" (UID: "467f535e-8c0e-43b0-b241-a85801fcb00a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.804569 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-inventory" (OuterVolumeSpecName: "inventory") pod "467f535e-8c0e-43b0-b241-a85801fcb00a" (UID: "467f535e-8c0e-43b0-b241-a85801fcb00a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.868409 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.868446 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.868459 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/467f535e-8c0e-43b0-b241-a85801fcb00a-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:06:39 crc kubenswrapper[4774]: I1121 16:06:39.868473 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75v5w\" (UniqueName: \"kubernetes.io/projected/467f535e-8c0e-43b0-b241-a85801fcb00a-kube-api-access-75v5w\") on node \"crc\" DevicePath \"\"" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.184178 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" event={"ID":"467f535e-8c0e-43b0-b241-a85801fcb00a","Type":"ContainerDied","Data":"13a9fa530aa359660af6ce3da472ff23ec744a053d4744250a1aaf19db4b180f"} Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.184222 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="13a9fa530aa359660af6ce3da472ff23ec744a053d4744250a1aaf19db4b180f" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.184280 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-openstack-openstack-cell1-jd5xr" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.260767 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-t9t7b"] Nov 21 16:06:40 crc kubenswrapper[4774]: E1121 16:06:40.261548 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="467f535e-8c0e-43b0-b241-a85801fcb00a" containerName="download-cache-openstack-openstack-cell1" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.261574 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="467f535e-8c0e-43b0-b241-a85801fcb00a" containerName="download-cache-openstack-openstack-cell1" Nov 21 16:06:40 crc kubenswrapper[4774]: E1121 16:06:40.261598 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a197aa5-a232-45de-8daf-5a8579f3a7d1" containerName="registry-server" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.261605 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a197aa5-a232-45de-8daf-5a8579f3a7d1" containerName="registry-server" Nov 21 16:06:40 crc kubenswrapper[4774]: E1121 16:06:40.261617 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a197aa5-a232-45de-8daf-5a8579f3a7d1" containerName="extract-utilities" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.261623 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a197aa5-a232-45de-8daf-5a8579f3a7d1" containerName="extract-utilities" Nov 21 16:06:40 crc kubenswrapper[4774]: E1121 16:06:40.261641 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a197aa5-a232-45de-8daf-5a8579f3a7d1" containerName="extract-content" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.261647 4774 
state_mem.go:107] "Deleted CPUSet assignment" podUID="2a197aa5-a232-45de-8daf-5a8579f3a7d1" containerName="extract-content" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.261893 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="467f535e-8c0e-43b0-b241-a85801fcb00a" containerName="download-cache-openstack-openstack-cell1" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.261922 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a197aa5-a232-45de-8daf-5a8579f3a7d1" containerName="registry-server" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.262726 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.265009 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.265009 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.273924 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.275036 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-t9t7b"] Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.279740 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.383298 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pp8pj\" (UniqueName: \"kubernetes.io/projected/1d149903-950a-438b-89f2-840ef6fca469-kube-api-access-pp8pj\") pod \"configure-network-openstack-openstack-cell1-t9t7b\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.383747 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-inventory\") pod \"configure-network-openstack-openstack-cell1-t9t7b\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.384154 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-ceph\") pod \"configure-network-openstack-openstack-cell1-t9t7b\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.384370 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-ssh-key\") pod \"configure-network-openstack-openstack-cell1-t9t7b\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.486024 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" 
(UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-ceph\") pod \"configure-network-openstack-openstack-cell1-t9t7b\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.486128 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-ssh-key\") pod \"configure-network-openstack-openstack-cell1-t9t7b\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.486199 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pp8pj\" (UniqueName: \"kubernetes.io/projected/1d149903-950a-438b-89f2-840ef6fca469-kube-api-access-pp8pj\") pod \"configure-network-openstack-openstack-cell1-t9t7b\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.486282 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-inventory\") pod \"configure-network-openstack-openstack-cell1-t9t7b\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.490232 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-ssh-key\") pod \"configure-network-openstack-openstack-cell1-t9t7b\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.491401 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-inventory\") pod \"configure-network-openstack-openstack-cell1-t9t7b\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.491974 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-ceph\") pod \"configure-network-openstack-openstack-cell1-t9t7b\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.510336 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pp8pj\" (UniqueName: \"kubernetes.io/projected/1d149903-950a-438b-89f2-840ef6fca469-kube-api-access-pp8pj\") pod \"configure-network-openstack-openstack-cell1-t9t7b\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:40 crc kubenswrapper[4774]: I1121 16:06:40.585563 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:06:41 crc kubenswrapper[4774]: I1121 16:06:41.122301 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-openstack-openstack-cell1-t9t7b"] Nov 21 16:06:41 crc kubenswrapper[4774]: W1121 16:06:41.124974 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d149903_950a_438b_89f2_840ef6fca469.slice/crio-0eb8ee83886e61e8f81f4b16b35e4457ea87c1b653668efc812376d84487a800 WatchSource:0}: Error finding container 0eb8ee83886e61e8f81f4b16b35e4457ea87c1b653668efc812376d84487a800: Status 404 returned error can't find the container with id 0eb8ee83886e61e8f81f4b16b35e4457ea87c1b653668efc812376d84487a800 Nov 21 16:06:41 crc kubenswrapper[4774]: I1121 16:06:41.202208 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" event={"ID":"1d149903-950a-438b-89f2-840ef6fca469","Type":"ContainerStarted","Data":"0eb8ee83886e61e8f81f4b16b35e4457ea87c1b653668efc812376d84487a800"} Nov 21 16:06:43 crc kubenswrapper[4774]: I1121 16:06:43.236918 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" event={"ID":"1d149903-950a-438b-89f2-840ef6fca469","Type":"ContainerStarted","Data":"bbdd7eb8afe8c11e5177e27e7b35783f50483d7544e37e93d459527fc2d73dd4"} Nov 21 16:06:43 crc kubenswrapper[4774]: I1121 16:06:43.262905 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" podStartSLOduration=1.914449201 podStartE2EDuration="3.262886406s" podCreationTimestamp="2025-11-21 16:06:40 +0000 UTC" firstStartedPulling="2025-11-21 16:06:41.129033699 +0000 UTC m=+7391.781232968" lastFinishedPulling="2025-11-21 16:06:42.477470704 +0000 UTC m=+7393.129670173" observedRunningTime="2025-11-21 16:06:43.258870481 +0000 UTC m=+7393.911069780" watchObservedRunningTime="2025-11-21 16:06:43.262886406 +0000 UTC m=+7393.915085675" Nov 21 16:06:46 crc kubenswrapper[4774]: I1121 16:06:46.093931 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:06:46 crc kubenswrapper[4774]: E1121 16:06:46.094350 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:07:01 crc kubenswrapper[4774]: I1121 16:07:01.094864 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:07:01 crc kubenswrapper[4774]: E1121 16:07:01.096274 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:07:13 crc kubenswrapper[4774]: I1121 16:07:13.094339 4774 
scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:07:13 crc kubenswrapper[4774]: E1121 16:07:13.095243 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:07:26 crc kubenswrapper[4774]: I1121 16:07:26.094149 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:07:26 crc kubenswrapper[4774]: E1121 16:07:26.095120 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:07:38 crc kubenswrapper[4774]: I1121 16:07:38.093334 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:07:38 crc kubenswrapper[4774]: E1121 16:07:38.094791 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:07:51 crc kubenswrapper[4774]: I1121 16:07:51.093865 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:07:51 crc kubenswrapper[4774]: E1121 16:07:51.094785 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:08:02 crc kubenswrapper[4774]: I1121 16:08:02.093482 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:08:02 crc kubenswrapper[4774]: E1121 16:08:02.094572 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:08:03 crc kubenswrapper[4774]: I1121 16:08:03.059231 4774 generic.go:334] "Generic (PLEG): container finished" podID="1d149903-950a-438b-89f2-840ef6fca469" containerID="bbdd7eb8afe8c11e5177e27e7b35783f50483d7544e37e93d459527fc2d73dd4" exitCode=0 Nov 21 
16:08:03 crc kubenswrapper[4774]: I1121 16:08:03.059281 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" event={"ID":"1d149903-950a-438b-89f2-840ef6fca469","Type":"ContainerDied","Data":"bbdd7eb8afe8c11e5177e27e7b35783f50483d7544e37e93d459527fc2d73dd4"} Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.569530 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.712601 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-ssh-key\") pod \"1d149903-950a-438b-89f2-840ef6fca469\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.712669 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-inventory\") pod \"1d149903-950a-438b-89f2-840ef6fca469\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.712877 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pp8pj\" (UniqueName: \"kubernetes.io/projected/1d149903-950a-438b-89f2-840ef6fca469-kube-api-access-pp8pj\") pod \"1d149903-950a-438b-89f2-840ef6fca469\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.712910 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-ceph\") pod \"1d149903-950a-438b-89f2-840ef6fca469\" (UID: \"1d149903-950a-438b-89f2-840ef6fca469\") " Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.719982 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d149903-950a-438b-89f2-840ef6fca469-kube-api-access-pp8pj" (OuterVolumeSpecName: "kube-api-access-pp8pj") pod "1d149903-950a-438b-89f2-840ef6fca469" (UID: "1d149903-950a-438b-89f2-840ef6fca469"). InnerVolumeSpecName "kube-api-access-pp8pj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.720761 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-ceph" (OuterVolumeSpecName: "ceph") pod "1d149903-950a-438b-89f2-840ef6fca469" (UID: "1d149903-950a-438b-89f2-840ef6fca469"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.743574 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-inventory" (OuterVolumeSpecName: "inventory") pod "1d149903-950a-438b-89f2-840ef6fca469" (UID: "1d149903-950a-438b-89f2-840ef6fca469"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.745540 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1d149903-950a-438b-89f2-840ef6fca469" (UID: "1d149903-950a-438b-89f2-840ef6fca469"). 
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.815726 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.815764 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.815776 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pp8pj\" (UniqueName: \"kubernetes.io/projected/1d149903-950a-438b-89f2-840ef6fca469-kube-api-access-pp8pj\") on node \"crc\" DevicePath \"\"" Nov 21 16:08:04 crc kubenswrapper[4774]: I1121 16:08:04.815788 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/1d149903-950a-438b-89f2-840ef6fca469-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.083737 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" event={"ID":"1d149903-950a-438b-89f2-840ef6fca469","Type":"ContainerDied","Data":"0eb8ee83886e61e8f81f4b16b35e4457ea87c1b653668efc812376d84487a800"} Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.083784 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0eb8ee83886e61e8f81f4b16b35e4457ea87c1b653668efc812376d84487a800" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.083787 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-openstack-openstack-cell1-t9t7b" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.165017 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-z64fz"] Nov 21 16:08:05 crc kubenswrapper[4774]: E1121 16:08:05.165756 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d149903-950a-438b-89f2-840ef6fca469" containerName="configure-network-openstack-openstack-cell1" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.165779 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d149903-950a-438b-89f2-840ef6fca469" containerName="configure-network-openstack-openstack-cell1" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.166029 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d149903-950a-438b-89f2-840ef6fca469" containerName="configure-network-openstack-openstack-cell1" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.167073 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.169091 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.169574 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.169676 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.169800 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.188367 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-z64fz"] Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.327598 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-ceph\") pod \"validate-network-openstack-openstack-cell1-z64fz\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.327656 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-inventory\") pod \"validate-network-openstack-openstack-cell1-z64fz\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.327910 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pqw8\" (UniqueName: \"kubernetes.io/projected/2fbbb706-71fc-47de-b788-ecb529f77d77-kube-api-access-4pqw8\") pod \"validate-network-openstack-openstack-cell1-z64fz\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.328059 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-ssh-key\") pod \"validate-network-openstack-openstack-cell1-z64fz\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.430330 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-inventory\") pod \"validate-network-openstack-openstack-cell1-z64fz\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.431379 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pqw8\" (UniqueName: \"kubernetes.io/projected/2fbbb706-71fc-47de-b788-ecb529f77d77-kube-api-access-4pqw8\") pod \"validate-network-openstack-openstack-cell1-z64fz\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " 
pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.431540 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-ssh-key\") pod \"validate-network-openstack-openstack-cell1-z64fz\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.431941 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-ceph\") pod \"validate-network-openstack-openstack-cell1-z64fz\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.444737 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-ssh-key\") pod \"validate-network-openstack-openstack-cell1-z64fz\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.444852 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-inventory\") pod \"validate-network-openstack-openstack-cell1-z64fz\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.445130 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-ceph\") pod \"validate-network-openstack-openstack-cell1-z64fz\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.459044 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pqw8\" (UniqueName: \"kubernetes.io/projected/2fbbb706-71fc-47de-b788-ecb529f77d77-kube-api-access-4pqw8\") pod \"validate-network-openstack-openstack-cell1-z64fz\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:05 crc kubenswrapper[4774]: I1121 16:08:05.485860 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:06 crc kubenswrapper[4774]: I1121 16:08:06.047560 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-openstack-openstack-cell1-z64fz"] Nov 21 16:08:06 crc kubenswrapper[4774]: I1121 16:08:06.051568 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 16:08:06 crc kubenswrapper[4774]: I1121 16:08:06.105868 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-z64fz" event={"ID":"2fbbb706-71fc-47de-b788-ecb529f77d77","Type":"ContainerStarted","Data":"a9e5c4851f09ed167829a3e94a9753372788805bb7bc416680b7ddd37131a65c"} Nov 21 16:08:07 crc kubenswrapper[4774]: I1121 16:08:07.103951 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-z64fz" event={"ID":"2fbbb706-71fc-47de-b788-ecb529f77d77","Type":"ContainerStarted","Data":"d9e52da8cccb47ff798e03195dc68fe38438db9599d969c91de01f892148985d"} Nov 21 16:08:07 crc kubenswrapper[4774]: I1121 16:08:07.129224 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-openstack-openstack-cell1-z64fz" podStartSLOduration=1.71844628 podStartE2EDuration="2.129191921s" podCreationTimestamp="2025-11-21 16:08:05 +0000 UTC" firstStartedPulling="2025-11-21 16:08:06.051355069 +0000 UTC m=+7476.703554328" lastFinishedPulling="2025-11-21 16:08:06.46210069 +0000 UTC m=+7477.114299969" observedRunningTime="2025-11-21 16:08:07.121831271 +0000 UTC m=+7477.774030530" watchObservedRunningTime="2025-11-21 16:08:07.129191921 +0000 UTC m=+7477.781391210" Nov 21 16:08:12 crc kubenswrapper[4774]: I1121 16:08:12.152733 4774 generic.go:334] "Generic (PLEG): container finished" podID="2fbbb706-71fc-47de-b788-ecb529f77d77" containerID="d9e52da8cccb47ff798e03195dc68fe38438db9599d969c91de01f892148985d" exitCode=0 Nov 21 16:08:12 crc kubenswrapper[4774]: I1121 16:08:12.152779 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-z64fz" event={"ID":"2fbbb706-71fc-47de-b788-ecb529f77d77","Type":"ContainerDied","Data":"d9e52da8cccb47ff798e03195dc68fe38438db9599d969c91de01f892148985d"} Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.631769 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.749095 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pqw8\" (UniqueName: \"kubernetes.io/projected/2fbbb706-71fc-47de-b788-ecb529f77d77-kube-api-access-4pqw8\") pod \"2fbbb706-71fc-47de-b788-ecb529f77d77\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.749193 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-ceph\") pod \"2fbbb706-71fc-47de-b788-ecb529f77d77\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.749288 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-inventory\") pod \"2fbbb706-71fc-47de-b788-ecb529f77d77\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.749325 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-ssh-key\") pod \"2fbbb706-71fc-47de-b788-ecb529f77d77\" (UID: \"2fbbb706-71fc-47de-b788-ecb529f77d77\") " Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.755299 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fbbb706-71fc-47de-b788-ecb529f77d77-kube-api-access-4pqw8" (OuterVolumeSpecName: "kube-api-access-4pqw8") pod "2fbbb706-71fc-47de-b788-ecb529f77d77" (UID: "2fbbb706-71fc-47de-b788-ecb529f77d77"). InnerVolumeSpecName "kube-api-access-4pqw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.755563 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-ceph" (OuterVolumeSpecName: "ceph") pod "2fbbb706-71fc-47de-b788-ecb529f77d77" (UID: "2fbbb706-71fc-47de-b788-ecb529f77d77"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.780662 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2fbbb706-71fc-47de-b788-ecb529f77d77" (UID: "2fbbb706-71fc-47de-b788-ecb529f77d77"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.780725 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-inventory" (OuterVolumeSpecName: "inventory") pod "2fbbb706-71fc-47de-b788-ecb529f77d77" (UID: "2fbbb706-71fc-47de-b788-ecb529f77d77"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.852732 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pqw8\" (UniqueName: \"kubernetes.io/projected/2fbbb706-71fc-47de-b788-ecb529f77d77-kube-api-access-4pqw8\") on node \"crc\" DevicePath \"\"" Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.852794 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.852852 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:08:13 crc kubenswrapper[4774]: I1121 16:08:13.852870 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2fbbb706-71fc-47de-b788-ecb529f77d77-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.179417 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-openstack-openstack-cell1-z64fz" event={"ID":"2fbbb706-71fc-47de-b788-ecb529f77d77","Type":"ContainerDied","Data":"a9e5c4851f09ed167829a3e94a9753372788805bb7bc416680b7ddd37131a65c"} Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.179470 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9e5c4851f09ed167829a3e94a9753372788805bb7bc416680b7ddd37131a65c" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.179514 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-openstack-openstack-cell1-z64fz" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.250099 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-openstack-openstack-cell1-sgn2s"] Nov 21 16:08:14 crc kubenswrapper[4774]: E1121 16:08:14.251207 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fbbb706-71fc-47de-b788-ecb529f77d77" containerName="validate-network-openstack-openstack-cell1" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.251240 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fbbb706-71fc-47de-b788-ecb529f77d77" containerName="validate-network-openstack-openstack-cell1" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.251659 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fbbb706-71fc-47de-b788-ecb529f77d77" containerName="validate-network-openstack-openstack-cell1" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.252955 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.260566 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.260956 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.261126 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.261276 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.264677 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-inventory\") pod \"install-os-openstack-openstack-cell1-sgn2s\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.264797 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9khq\" (UniqueName: \"kubernetes.io/projected/fb254ff4-7f74-4263-96a4-b09712df47f1-kube-api-access-r9khq\") pod \"install-os-openstack-openstack-cell1-sgn2s\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.264880 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-ceph\") pod \"install-os-openstack-openstack-cell1-sgn2s\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.265101 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-ssh-key\") pod \"install-os-openstack-openstack-cell1-sgn2s\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.269549 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-sgn2s"] Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.367579 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-inventory\") pod \"install-os-openstack-openstack-cell1-sgn2s\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.367702 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9khq\" (UniqueName: \"kubernetes.io/projected/fb254ff4-7f74-4263-96a4-b09712df47f1-kube-api-access-r9khq\") pod \"install-os-openstack-openstack-cell1-sgn2s\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 
16:08:14.367763 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-ceph\") pod \"install-os-openstack-openstack-cell1-sgn2s\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.367837 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-ssh-key\") pod \"install-os-openstack-openstack-cell1-sgn2s\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.371695 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-ssh-key\") pod \"install-os-openstack-openstack-cell1-sgn2s\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.372546 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-ceph\") pod \"install-os-openstack-openstack-cell1-sgn2s\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.373912 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-inventory\") pod \"install-os-openstack-openstack-cell1-sgn2s\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.389652 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9khq\" (UniqueName: \"kubernetes.io/projected/fb254ff4-7f74-4263-96a4-b09712df47f1-kube-api-access-r9khq\") pod \"install-os-openstack-openstack-cell1-sgn2s\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:14 crc kubenswrapper[4774]: I1121 16:08:14.574174 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:08:15 crc kubenswrapper[4774]: I1121 16:08:15.076700 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-openstack-openstack-cell1-sgn2s"] Nov 21 16:08:15 crc kubenswrapper[4774]: I1121 16:08:15.097391 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:08:15 crc kubenswrapper[4774]: E1121 16:08:15.097716 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:08:15 crc kubenswrapper[4774]: I1121 16:08:15.190550 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-sgn2s" event={"ID":"fb254ff4-7f74-4263-96a4-b09712df47f1","Type":"ContainerStarted","Data":"201e6d352c4ed02586ee8d2a599d45f5f57086005cc62023f57a892c2323b8cf"} Nov 21 16:08:16 crc kubenswrapper[4774]: I1121 16:08:16.203930 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-sgn2s" event={"ID":"fb254ff4-7f74-4263-96a4-b09712df47f1","Type":"ContainerStarted","Data":"2d2b5f0e4f7d888dbeb366bb6b26650d984ab88b9c48768a0564c04e172422cc"} Nov 21 16:08:16 crc kubenswrapper[4774]: I1121 16:08:16.233867 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-openstack-openstack-cell1-sgn2s" podStartSLOduration=1.848295856 podStartE2EDuration="2.233784945s" podCreationTimestamp="2025-11-21 16:08:14 +0000 UTC" firstStartedPulling="2025-11-21 16:08:15.083082691 +0000 UTC m=+7485.735281950" lastFinishedPulling="2025-11-21 16:08:15.46857177 +0000 UTC m=+7486.120771039" observedRunningTime="2025-11-21 16:08:16.223350107 +0000 UTC m=+7486.875549386" watchObservedRunningTime="2025-11-21 16:08:16.233784945 +0000 UTC m=+7486.885984204" Nov 21 16:08:29 crc kubenswrapper[4774]: I1121 16:08:29.092720 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:08:29 crc kubenswrapper[4774]: E1121 16:08:29.093445 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:08:43 crc kubenswrapper[4774]: I1121 16:08:43.093694 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:08:43 crc kubenswrapper[4774]: E1121 16:08:43.094588 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" 
podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:08:54 crc kubenswrapper[4774]: I1121 16:08:54.094617 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:08:54 crc kubenswrapper[4774]: E1121 16:08:54.097712 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:08:59 crc kubenswrapper[4774]: I1121 16:08:59.675997 4774 generic.go:334] "Generic (PLEG): container finished" podID="fb254ff4-7f74-4263-96a4-b09712df47f1" containerID="2d2b5f0e4f7d888dbeb366bb6b26650d984ab88b9c48768a0564c04e172422cc" exitCode=0 Nov 21 16:08:59 crc kubenswrapper[4774]: I1121 16:08:59.676094 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-sgn2s" event={"ID":"fb254ff4-7f74-4263-96a4-b09712df47f1","Type":"ContainerDied","Data":"2d2b5f0e4f7d888dbeb366bb6b26650d984ab88b9c48768a0564c04e172422cc"} Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.120391 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.318409 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-ssh-key\") pod \"fb254ff4-7f74-4263-96a4-b09712df47f1\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.318512 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9khq\" (UniqueName: \"kubernetes.io/projected/fb254ff4-7f74-4263-96a4-b09712df47f1-kube-api-access-r9khq\") pod \"fb254ff4-7f74-4263-96a4-b09712df47f1\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.318594 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-inventory\") pod \"fb254ff4-7f74-4263-96a4-b09712df47f1\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.318741 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-ceph\") pod \"fb254ff4-7f74-4263-96a4-b09712df47f1\" (UID: \"fb254ff4-7f74-4263-96a4-b09712df47f1\") " Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.324718 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-ceph" (OuterVolumeSpecName: "ceph") pod "fb254ff4-7f74-4263-96a4-b09712df47f1" (UID: "fb254ff4-7f74-4263-96a4-b09712df47f1"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.325113 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb254ff4-7f74-4263-96a4-b09712df47f1-kube-api-access-r9khq" (OuterVolumeSpecName: "kube-api-access-r9khq") pod "fb254ff4-7f74-4263-96a4-b09712df47f1" (UID: "fb254ff4-7f74-4263-96a4-b09712df47f1"). InnerVolumeSpecName "kube-api-access-r9khq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.352452 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "fb254ff4-7f74-4263-96a4-b09712df47f1" (UID: "fb254ff4-7f74-4263-96a4-b09712df47f1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.359052 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-inventory" (OuterVolumeSpecName: "inventory") pod "fb254ff4-7f74-4263-96a4-b09712df47f1" (UID: "fb254ff4-7f74-4263-96a4-b09712df47f1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.421350 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.422029 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.422065 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9khq\" (UniqueName: \"kubernetes.io/projected/fb254ff4-7f74-4263-96a4-b09712df47f1-kube-api-access-r9khq\") on node \"crc\" DevicePath \"\"" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.422086 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fb254ff4-7f74-4263-96a4-b09712df47f1-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.695634 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-openstack-openstack-cell1-sgn2s" event={"ID":"fb254ff4-7f74-4263-96a4-b09712df47f1","Type":"ContainerDied","Data":"201e6d352c4ed02586ee8d2a599d45f5f57086005cc62023f57a892c2323b8cf"} Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.695674 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="201e6d352c4ed02586ee8d2a599d45f5f57086005cc62023f57a892c2323b8cf" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.695682 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-openstack-openstack-cell1-sgn2s" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.769858 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-mklzh"] Nov 21 16:09:01 crc kubenswrapper[4774]: E1121 16:09:01.770359 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb254ff4-7f74-4263-96a4-b09712df47f1" containerName="install-os-openstack-openstack-cell1" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.770379 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb254ff4-7f74-4263-96a4-b09712df47f1" containerName="install-os-openstack-openstack-cell1" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.770614 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb254ff4-7f74-4263-96a4-b09712df47f1" containerName="install-os-openstack-openstack-cell1" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.771454 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.773690 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.773781 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.774005 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.774124 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.787805 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-mklzh"] Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.931968 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-ceph\") pod \"configure-os-openstack-openstack-cell1-mklzh\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.932578 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s526m\" (UniqueName: \"kubernetes.io/projected/7d471f88-4d25-4cce-84d1-61ac88d8a740-kube-api-access-s526m\") pod \"configure-os-openstack-openstack-cell1-mklzh\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.932767 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-ssh-key\") pod \"configure-os-openstack-openstack-cell1-mklzh\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:01 crc kubenswrapper[4774]: I1121 16:09:01.932855 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-inventory\") pod \"configure-os-openstack-openstack-cell1-mklzh\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:02 crc kubenswrapper[4774]: I1121 16:09:02.044788 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-ceph\") pod \"configure-os-openstack-openstack-cell1-mklzh\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:02 crc kubenswrapper[4774]: I1121 16:09:02.044874 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s526m\" (UniqueName: \"kubernetes.io/projected/7d471f88-4d25-4cce-84d1-61ac88d8a740-kube-api-access-s526m\") pod \"configure-os-openstack-openstack-cell1-mklzh\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:02 crc kubenswrapper[4774]: I1121 16:09:02.044943 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-ssh-key\") pod \"configure-os-openstack-openstack-cell1-mklzh\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:02 crc kubenswrapper[4774]: I1121 16:09:02.044978 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-inventory\") pod \"configure-os-openstack-openstack-cell1-mklzh\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:02 crc kubenswrapper[4774]: I1121 16:09:02.050564 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-ssh-key\") pod \"configure-os-openstack-openstack-cell1-mklzh\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:02 crc kubenswrapper[4774]: I1121 16:09:02.051087 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-inventory\") pod \"configure-os-openstack-openstack-cell1-mklzh\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:02 crc kubenswrapper[4774]: I1121 16:09:02.051133 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-ceph\") pod \"configure-os-openstack-openstack-cell1-mklzh\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:02 crc kubenswrapper[4774]: I1121 16:09:02.063236 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s526m\" (UniqueName: \"kubernetes.io/projected/7d471f88-4d25-4cce-84d1-61ac88d8a740-kube-api-access-s526m\") pod \"configure-os-openstack-openstack-cell1-mklzh\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:02 crc kubenswrapper[4774]: I1121 
16:09:02.091173 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:02 crc kubenswrapper[4774]: I1121 16:09:02.651481 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-openstack-openstack-cell1-mklzh"] Nov 21 16:09:02 crc kubenswrapper[4774]: I1121 16:09:02.707400 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-mklzh" event={"ID":"7d471f88-4d25-4cce-84d1-61ac88d8a740","Type":"ContainerStarted","Data":"a8f460b16cc3efac611d0dba0bd86dbacc2b50790adcee5560d6f3a51f69957a"} Nov 21 16:09:03 crc kubenswrapper[4774]: I1121 16:09:03.716725 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-mklzh" event={"ID":"7d471f88-4d25-4cce-84d1-61ac88d8a740","Type":"ContainerStarted","Data":"6cbda82bb726a64d0c9ff3cf557af993a74bc49f1f5d9f846576f9e5c031925f"} Nov 21 16:09:03 crc kubenswrapper[4774]: I1121 16:09:03.737376 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-openstack-openstack-cell1-mklzh" podStartSLOduration=2.016554161 podStartE2EDuration="2.737361567s" podCreationTimestamp="2025-11-21 16:09:01 +0000 UTC" firstStartedPulling="2025-11-21 16:09:02.652555585 +0000 UTC m=+7533.304754844" lastFinishedPulling="2025-11-21 16:09:03.373362991 +0000 UTC m=+7534.025562250" observedRunningTime="2025-11-21 16:09:03.733761474 +0000 UTC m=+7534.385960733" watchObservedRunningTime="2025-11-21 16:09:03.737361567 +0000 UTC m=+7534.389560826" Nov 21 16:09:06 crc kubenswrapper[4774]: I1121 16:09:06.094420 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:09:06 crc kubenswrapper[4774]: E1121 16:09:06.095414 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:09:17 crc kubenswrapper[4774]: I1121 16:09:17.093385 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:09:17 crc kubenswrapper[4774]: E1121 16:09:17.094124 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:09:29 crc kubenswrapper[4774]: I1121 16:09:29.093636 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:09:29 crc kubenswrapper[4774]: E1121 16:09:29.094476 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:09:42 crc kubenswrapper[4774]: I1121 16:09:42.094492 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:09:42 crc kubenswrapper[4774]: E1121 16:09:42.095546 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:09:45 crc kubenswrapper[4774]: E1121 16:09:45.946974 4774 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d471f88_4d25_4cce_84d1_61ac88d8a740.slice/crio-6cbda82bb726a64d0c9ff3cf557af993a74bc49f1f5d9f846576f9e5c031925f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d471f88_4d25_4cce_84d1_61ac88d8a740.slice/crio-conmon-6cbda82bb726a64d0c9ff3cf557af993a74bc49f1f5d9f846576f9e5c031925f.scope\": RecentStats: unable to find data in memory cache]" Nov 21 16:09:46 crc kubenswrapper[4774]: I1121 16:09:46.147778 4774 generic.go:334] "Generic (PLEG): container finished" podID="7d471f88-4d25-4cce-84d1-61ac88d8a740" containerID="6cbda82bb726a64d0c9ff3cf557af993a74bc49f1f5d9f846576f9e5c031925f" exitCode=0 Nov 21 16:09:46 crc kubenswrapper[4774]: I1121 16:09:46.148290 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-mklzh" event={"ID":"7d471f88-4d25-4cce-84d1-61ac88d8a740","Type":"ContainerDied","Data":"6cbda82bb726a64d0c9ff3cf557af993a74bc49f1f5d9f846576f9e5c031925f"} Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.414357 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z6986"] Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.419791 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.422870 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z6986"] Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.518086 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-utilities\") pod \"certified-operators-z6986\" (UID: \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\") " pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.518328 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-catalog-content\") pod \"certified-operators-z6986\" (UID: \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\") " pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.519478 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9s4sr\" (UniqueName: \"kubernetes.io/projected/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-kube-api-access-9s4sr\") pod \"certified-operators-z6986\" (UID: \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\") " pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.621869 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-catalog-content\") pod \"certified-operators-z6986\" (UID: \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\") " pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.622027 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9s4sr\" (UniqueName: \"kubernetes.io/projected/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-kube-api-access-9s4sr\") pod \"certified-operators-z6986\" (UID: \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\") " pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.622069 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-utilities\") pod \"certified-operators-z6986\" (UID: \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\") " pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.622690 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-utilities\") pod \"certified-operators-z6986\" (UID: \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\") " pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.622988 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-catalog-content\") pod \"certified-operators-z6986\" (UID: \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\") " pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.642139 4774 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9s4sr\" (UniqueName: \"kubernetes.io/projected/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-kube-api-access-9s4sr\") pod \"certified-operators-z6986\" (UID: \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\") " pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.695930 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.757977 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.826549 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-inventory\") pod \"7d471f88-4d25-4cce-84d1-61ac88d8a740\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.826875 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-ceph\") pod \"7d471f88-4d25-4cce-84d1-61ac88d8a740\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.826924 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-ssh-key\") pod \"7d471f88-4d25-4cce-84d1-61ac88d8a740\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.827035 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s526m\" (UniqueName: \"kubernetes.io/projected/7d471f88-4d25-4cce-84d1-61ac88d8a740-kube-api-access-s526m\") pod \"7d471f88-4d25-4cce-84d1-61ac88d8a740\" (UID: \"7d471f88-4d25-4cce-84d1-61ac88d8a740\") " Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.840652 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-ceph" (OuterVolumeSpecName: "ceph") pod "7d471f88-4d25-4cce-84d1-61ac88d8a740" (UID: "7d471f88-4d25-4cce-84d1-61ac88d8a740"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.842075 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d471f88-4d25-4cce-84d1-61ac88d8a740-kube-api-access-s526m" (OuterVolumeSpecName: "kube-api-access-s526m") pod "7d471f88-4d25-4cce-84d1-61ac88d8a740" (UID: "7d471f88-4d25-4cce-84d1-61ac88d8a740"). InnerVolumeSpecName "kube-api-access-s526m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.888132 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "7d471f88-4d25-4cce-84d1-61ac88d8a740" (UID: "7d471f88-4d25-4cce-84d1-61ac88d8a740"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.889094 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-inventory" (OuterVolumeSpecName: "inventory") pod "7d471f88-4d25-4cce-84d1-61ac88d8a740" (UID: "7d471f88-4d25-4cce-84d1-61ac88d8a740"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.933300 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s526m\" (UniqueName: \"kubernetes.io/projected/7d471f88-4d25-4cce-84d1-61ac88d8a740-kube-api-access-s526m\") on node \"crc\" DevicePath \"\"" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.933326 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.933337 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:09:47 crc kubenswrapper[4774]: I1121 16:09:47.933345 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/7d471f88-4d25-4cce-84d1-61ac88d8a740-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.174009 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-openstack-openstack-cell1-mklzh" event={"ID":"7d471f88-4d25-4cce-84d1-61ac88d8a740","Type":"ContainerDied","Data":"a8f460b16cc3efac611d0dba0bd86dbacc2b50790adcee5560d6f3a51f69957a"} Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.174050 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8f460b16cc3efac611d0dba0bd86dbacc2b50790adcee5560d6f3a51f69957a" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.174136 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-openstack-openstack-cell1-mklzh" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.269732 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-openstack-pf4n6"] Nov 21 16:09:48 crc kubenswrapper[4774]: E1121 16:09:48.270615 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d471f88-4d25-4cce-84d1-61ac88d8a740" containerName="configure-os-openstack-openstack-cell1" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.270635 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d471f88-4d25-4cce-84d1-61ac88d8a740" containerName="configure-os-openstack-openstack-cell1" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.270866 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d471f88-4d25-4cce-84d1-61ac88d8a740" containerName="configure-os-openstack-openstack-cell1" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.271623 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.273634 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.273811 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.277989 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.278034 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.299133 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-pf4n6"] Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.330441 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z6986"] Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.447800 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-pf4n6\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.447878 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-ceph\") pod \"ssh-known-hosts-openstack-pf4n6\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.447995 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fng6v\" (UniqueName: \"kubernetes.io/projected/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-kube-api-access-fng6v\") pod \"ssh-known-hosts-openstack-pf4n6\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.448036 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-inventory-0\") pod \"ssh-known-hosts-openstack-pf4n6\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.549801 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-pf4n6\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.549894 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-ceph\") pod \"ssh-known-hosts-openstack-pf4n6\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 
16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.549985 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fng6v\" (UniqueName: \"kubernetes.io/projected/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-kube-api-access-fng6v\") pod \"ssh-known-hosts-openstack-pf4n6\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.550033 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-inventory-0\") pod \"ssh-known-hosts-openstack-pf4n6\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.557052 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-inventory-0\") pod \"ssh-known-hosts-openstack-pf4n6\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.557486 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-ssh-key-openstack-cell1\") pod \"ssh-known-hosts-openstack-pf4n6\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.564222 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-ceph\") pod \"ssh-known-hosts-openstack-pf4n6\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.568663 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fng6v\" (UniqueName: \"kubernetes.io/projected/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-kube-api-access-fng6v\") pod \"ssh-known-hosts-openstack-pf4n6\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:48 crc kubenswrapper[4774]: I1121 16:09:48.603221 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:09:49 crc kubenswrapper[4774]: I1121 16:09:49.139279 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-openstack-pf4n6"] Nov 21 16:09:49 crc kubenswrapper[4774]: I1121 16:09:49.206226 4774 generic.go:334] "Generic (PLEG): container finished" podID="34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" containerID="ac7eb879374f64407ba58f4d944f05b99cab07cecd174055e40557af01341b60" exitCode=0 Nov 21 16:09:49 crc kubenswrapper[4774]: I1121 16:09:49.206287 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6986" event={"ID":"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad","Type":"ContainerDied","Data":"ac7eb879374f64407ba58f4d944f05b99cab07cecd174055e40557af01341b60"} Nov 21 16:09:49 crc kubenswrapper[4774]: I1121 16:09:49.206665 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6986" event={"ID":"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad","Type":"ContainerStarted","Data":"92b5b0d1c9e064033cfa6ea69e4812c2f6d0e57c22efae24869d2051cada3c03"} Nov 21 16:09:49 crc kubenswrapper[4774]: I1121 16:09:49.208781 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-pf4n6" event={"ID":"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3","Type":"ContainerStarted","Data":"198ce21f485a2569d7cf9710793304dc1b29a70e824f4c93d1d70a533c2735c9"} Nov 21 16:09:51 crc kubenswrapper[4774]: I1121 16:09:51.231965 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-pf4n6" event={"ID":"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3","Type":"ContainerStarted","Data":"4eda8f1f33b9a4b935592a04dd964a5c6a460ccdfef194f13bca3fe0d9f4af80"} Nov 21 16:09:51 crc kubenswrapper[4774]: I1121 16:09:51.234176 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6986" event={"ID":"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad","Type":"ContainerStarted","Data":"1263af37a70079fefc8cc078698c72ec9e6b3051cdfc1cb53168317df1cf6feb"} Nov 21 16:09:51 crc kubenswrapper[4774]: I1121 16:09:51.258155 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-openstack-pf4n6" podStartSLOduration=2.075691067 podStartE2EDuration="3.258133537s" podCreationTimestamp="2025-11-21 16:09:48 +0000 UTC" firstStartedPulling="2025-11-21 16:09:49.147521189 +0000 UTC m=+7579.799720448" lastFinishedPulling="2025-11-21 16:09:50.329963649 +0000 UTC m=+7580.982162918" observedRunningTime="2025-11-21 16:09:51.245568238 +0000 UTC m=+7581.897767507" watchObservedRunningTime="2025-11-21 16:09:51.258133537 +0000 UTC m=+7581.910332796" Nov 21 16:09:52 crc kubenswrapper[4774]: I1121 16:09:52.245795 4774 generic.go:334] "Generic (PLEG): container finished" podID="34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" containerID="1263af37a70079fefc8cc078698c72ec9e6b3051cdfc1cb53168317df1cf6feb" exitCode=0 Nov 21 16:09:52 crc kubenswrapper[4774]: I1121 16:09:52.245869 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6986" event={"ID":"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad","Type":"ContainerDied","Data":"1263af37a70079fefc8cc078698c72ec9e6b3051cdfc1cb53168317df1cf6feb"} Nov 21 16:09:53 crc kubenswrapper[4774]: I1121 16:09:53.058736 4774 scope.go:117] "RemoveContainer" containerID="d0206f188ab787748c4d3c514cb030cb5bb8387ca0c1171e222c35903970da64" Nov 21 16:09:53 crc kubenswrapper[4774]: I1121 16:09:53.081492 4774 
scope.go:117] "RemoveContainer" containerID="a3f829d81f4b92b4ace3a0c834dc7c524c5f836f55e5d689c00ac975b45b74d8" Nov 21 16:09:53 crc kubenswrapper[4774]: I1121 16:09:53.131228 4774 scope.go:117] "RemoveContainer" containerID="bfc7907641e2d4c319156405a0e666c67f588346fbdb4f2e08cdea632b64f49b" Nov 21 16:09:53 crc kubenswrapper[4774]: I1121 16:09:53.259373 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6986" event={"ID":"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad","Type":"ContainerStarted","Data":"6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c"} Nov 21 16:09:53 crc kubenswrapper[4774]: I1121 16:09:53.276692 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z6986" podStartSLOduration=2.804236104 podStartE2EDuration="6.276669436s" podCreationTimestamp="2025-11-21 16:09:47 +0000 UTC" firstStartedPulling="2025-11-21 16:09:49.208364156 +0000 UTC m=+7579.860563415" lastFinishedPulling="2025-11-21 16:09:52.680797498 +0000 UTC m=+7583.332996747" observedRunningTime="2025-11-21 16:09:53.274976027 +0000 UTC m=+7583.927175296" watchObservedRunningTime="2025-11-21 16:09:53.276669436 +0000 UTC m=+7583.928868695" Nov 21 16:09:55 crc kubenswrapper[4774]: I1121 16:09:55.099599 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:09:55 crc kubenswrapper[4774]: E1121 16:09:55.100201 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:09:57 crc kubenswrapper[4774]: I1121 16:09:57.758677 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:57 crc kubenswrapper[4774]: I1121 16:09:57.759308 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:57 crc kubenswrapper[4774]: I1121 16:09:57.807534 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:58 crc kubenswrapper[4774]: I1121 16:09:58.349388 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:09:58 crc kubenswrapper[4774]: I1121 16:09:58.402687 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z6986"] Nov 21 16:09:59 crc kubenswrapper[4774]: I1121 16:09:59.312376 4774 generic.go:334] "Generic (PLEG): container finished" podID="c2331cdc-adfd-4e78-b7eb-c91e7518b9c3" containerID="4eda8f1f33b9a4b935592a04dd964a5c6a460ccdfef194f13bca3fe0d9f4af80" exitCode=0 Nov 21 16:09:59 crc kubenswrapper[4774]: I1121 16:09:59.312456 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-pf4n6" event={"ID":"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3","Type":"ContainerDied","Data":"4eda8f1f33b9a4b935592a04dd964a5c6a460ccdfef194f13bca3fe0d9f4af80"} Nov 21 16:10:00 crc kubenswrapper[4774]: I1121 16:10:00.325520 4774 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openshift-marketplace/certified-operators-z6986" podUID="34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" containerName="registry-server" containerID="cri-o://6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c" gracePeriod=2 Nov 21 16:10:00 crc kubenswrapper[4774]: I1121 16:10:00.951872 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:10:00 crc kubenswrapper[4774]: I1121 16:10:00.961721 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.041054 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-ceph\") pod \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.042728 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-utilities\") pod \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\" (UID: \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\") " Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.042834 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-catalog-content\") pod \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\" (UID: \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\") " Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.042867 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9s4sr\" (UniqueName: \"kubernetes.io/projected/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-kube-api-access-9s4sr\") pod \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\" (UID: \"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad\") " Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.042984 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fng6v\" (UniqueName: \"kubernetes.io/projected/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-kube-api-access-fng6v\") pod \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.043090 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-inventory-0\") pod \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.043368 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-ssh-key-openstack-cell1\") pod \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\" (UID: \"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3\") " Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.044044 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-utilities" (OuterVolumeSpecName: "utilities") pod "34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" (UID: "34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.044736 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.047396 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-kube-api-access-9s4sr" (OuterVolumeSpecName: "kube-api-access-9s4sr") pod "34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" (UID: "34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad"). InnerVolumeSpecName "kube-api-access-9s4sr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.047460 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-ceph" (OuterVolumeSpecName: "ceph") pod "c2331cdc-adfd-4e78-b7eb-c91e7518b9c3" (UID: "c2331cdc-adfd-4e78-b7eb-c91e7518b9c3"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.051770 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-kube-api-access-fng6v" (OuterVolumeSpecName: "kube-api-access-fng6v") pod "c2331cdc-adfd-4e78-b7eb-c91e7518b9c3" (UID: "c2331cdc-adfd-4e78-b7eb-c91e7518b9c3"). InnerVolumeSpecName "kube-api-access-fng6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.075072 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "c2331cdc-adfd-4e78-b7eb-c91e7518b9c3" (UID: "c2331cdc-adfd-4e78-b7eb-c91e7518b9c3"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.081941 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-ssh-key-openstack-cell1" (OuterVolumeSpecName: "ssh-key-openstack-cell1") pod "c2331cdc-adfd-4e78-b7eb-c91e7518b9c3" (UID: "c2331cdc-adfd-4e78-b7eb-c91e7518b9c3"). InnerVolumeSpecName "ssh-key-openstack-cell1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.092949 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" (UID: "34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.147264 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-cell1\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-ssh-key-openstack-cell1\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.147321 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.147339 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.147355 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9s4sr\" (UniqueName: \"kubernetes.io/projected/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad-kube-api-access-9s4sr\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.147371 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fng6v\" (UniqueName: \"kubernetes.io/projected/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-kube-api-access-fng6v\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.147385 4774 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/c2331cdc-adfd-4e78-b7eb-c91e7518b9c3-inventory-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.335593 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-openstack-pf4n6" event={"ID":"c2331cdc-adfd-4e78-b7eb-c91e7518b9c3","Type":"ContainerDied","Data":"198ce21f485a2569d7cf9710793304dc1b29a70e824f4c93d1d70a533c2735c9"} Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.335856 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="198ce21f485a2569d7cf9710793304dc1b29a70e824f4c93d1d70a533c2735c9" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.335634 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-openstack-pf4n6" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.340409 4774 generic.go:334] "Generic (PLEG): container finished" podID="34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" containerID="6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c" exitCode=0 Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.340438 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6986" event={"ID":"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad","Type":"ContainerDied","Data":"6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c"} Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.340456 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z6986" event={"ID":"34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad","Type":"ContainerDied","Data":"92b5b0d1c9e064033cfa6ea69e4812c2f6d0e57c22efae24869d2051cada3c03"} Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.340473 4774 scope.go:117] "RemoveContainer" containerID="6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.340900 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z6986" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.391237 4774 scope.go:117] "RemoveContainer" containerID="1263af37a70079fefc8cc078698c72ec9e6b3051cdfc1cb53168317df1cf6feb" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.402988 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-z6986"] Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.416241 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-z6986"] Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.429163 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-openstack-openstack-cell1-d4tnk"] Nov 21 16:10:01 crc kubenswrapper[4774]: E1121 16:10:01.429797 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" containerName="extract-content" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.429840 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" containerName="extract-content" Nov 21 16:10:01 crc kubenswrapper[4774]: E1121 16:10:01.429863 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" containerName="registry-server" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.429873 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" containerName="registry-server" Nov 21 16:10:01 crc kubenswrapper[4774]: E1121 16:10:01.429894 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2331cdc-adfd-4e78-b7eb-c91e7518b9c3" containerName="ssh-known-hosts-openstack" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.429901 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2331cdc-adfd-4e78-b7eb-c91e7518b9c3" containerName="ssh-known-hosts-openstack" Nov 21 16:10:01 crc kubenswrapper[4774]: E1121 16:10:01.429922 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" containerName="extract-utilities" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.429930 4774 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" containerName="extract-utilities" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.430194 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" containerName="registry-server" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.430236 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2331cdc-adfd-4e78-b7eb-c91e7518b9c3" containerName="ssh-known-hosts-openstack" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.431506 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.433796 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.433858 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.433902 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.435490 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.438587 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-d4tnk"] Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.442163 4774 scope.go:117] "RemoveContainer" containerID="ac7eb879374f64407ba58f4d944f05b99cab07cecd174055e40557af01341b60" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.511493 4774 scope.go:117] "RemoveContainer" containerID="6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c" Nov 21 16:10:01 crc kubenswrapper[4774]: E1121 16:10:01.511925 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c\": container with ID starting with 6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c not found: ID does not exist" containerID="6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.511976 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c"} err="failed to get container status \"6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c\": rpc error: code = NotFound desc = could not find container \"6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c\": container with ID starting with 6da87aa4613774c3901cfc7f98d6b412d795a43bce255cbc57051ea36dc2899c not found: ID does not exist" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.512008 4774 scope.go:117] "RemoveContainer" containerID="1263af37a70079fefc8cc078698c72ec9e6b3051cdfc1cb53168317df1cf6feb" Nov 21 16:10:01 crc kubenswrapper[4774]: E1121 16:10:01.512400 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1263af37a70079fefc8cc078698c72ec9e6b3051cdfc1cb53168317df1cf6feb\": container with ID starting with 1263af37a70079fefc8cc078698c72ec9e6b3051cdfc1cb53168317df1cf6feb not found: ID 
does not exist" containerID="1263af37a70079fefc8cc078698c72ec9e6b3051cdfc1cb53168317df1cf6feb" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.512453 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1263af37a70079fefc8cc078698c72ec9e6b3051cdfc1cb53168317df1cf6feb"} err="failed to get container status \"1263af37a70079fefc8cc078698c72ec9e6b3051cdfc1cb53168317df1cf6feb\": rpc error: code = NotFound desc = could not find container \"1263af37a70079fefc8cc078698c72ec9e6b3051cdfc1cb53168317df1cf6feb\": container with ID starting with 1263af37a70079fefc8cc078698c72ec9e6b3051cdfc1cb53168317df1cf6feb not found: ID does not exist" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.512490 4774 scope.go:117] "RemoveContainer" containerID="ac7eb879374f64407ba58f4d944f05b99cab07cecd174055e40557af01341b60" Nov 21 16:10:01 crc kubenswrapper[4774]: E1121 16:10:01.512946 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac7eb879374f64407ba58f4d944f05b99cab07cecd174055e40557af01341b60\": container with ID starting with ac7eb879374f64407ba58f4d944f05b99cab07cecd174055e40557af01341b60 not found: ID does not exist" containerID="ac7eb879374f64407ba58f4d944f05b99cab07cecd174055e40557af01341b60" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.512982 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac7eb879374f64407ba58f4d944f05b99cab07cecd174055e40557af01341b60"} err="failed to get container status \"ac7eb879374f64407ba58f4d944f05b99cab07cecd174055e40557af01341b60\": rpc error: code = NotFound desc = could not find container \"ac7eb879374f64407ba58f4d944f05b99cab07cecd174055e40557af01341b60\": container with ID starting with ac7eb879374f64407ba58f4d944f05b99cab07cecd174055e40557af01341b60 not found: ID does not exist" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.557326 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-ceph\") pod \"run-os-openstack-openstack-cell1-d4tnk\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.557448 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7lwn\" (UniqueName: \"kubernetes.io/projected/c084e8b3-667e-4b63-b370-a667a4b0cda6-kube-api-access-k7lwn\") pod \"run-os-openstack-openstack-cell1-d4tnk\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.557516 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-ssh-key\") pod \"run-os-openstack-openstack-cell1-d4tnk\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.557779 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-inventory\") pod \"run-os-openstack-openstack-cell1-d4tnk\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " 
pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.660414 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-ceph\") pod \"run-os-openstack-openstack-cell1-d4tnk\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.660507 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7lwn\" (UniqueName: \"kubernetes.io/projected/c084e8b3-667e-4b63-b370-a667a4b0cda6-kube-api-access-k7lwn\") pod \"run-os-openstack-openstack-cell1-d4tnk\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.660540 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-ssh-key\") pod \"run-os-openstack-openstack-cell1-d4tnk\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.660579 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-inventory\") pod \"run-os-openstack-openstack-cell1-d4tnk\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.665263 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-ceph\") pod \"run-os-openstack-openstack-cell1-d4tnk\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.665297 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-inventory\") pod \"run-os-openstack-openstack-cell1-d4tnk\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.668237 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-ssh-key\") pod \"run-os-openstack-openstack-cell1-d4tnk\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.692351 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7lwn\" (UniqueName: \"kubernetes.io/projected/c084e8b3-667e-4b63-b370-a667a4b0cda6-kube-api-access-k7lwn\") pod \"run-os-openstack-openstack-cell1-d4tnk\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:01 crc kubenswrapper[4774]: I1121 16:10:01.851677 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:02 crc kubenswrapper[4774]: I1121 16:10:02.112055 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad" path="/var/lib/kubelet/pods/34a2ac9a-9c6e-4b61-982d-d8d22f5b74ad/volumes" Nov 21 16:10:02 crc kubenswrapper[4774]: I1121 16:10:02.375792 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-openstack-openstack-cell1-d4tnk"] Nov 21 16:10:02 crc kubenswrapper[4774]: W1121 16:10:02.379685 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc084e8b3_667e_4b63_b370_a667a4b0cda6.slice/crio-8c7b85d30420fe31fde0251ceacd029b49f792d0d9230b453094f630d7d563d8 WatchSource:0}: Error finding container 8c7b85d30420fe31fde0251ceacd029b49f792d0d9230b453094f630d7d563d8: Status 404 returned error can't find the container with id 8c7b85d30420fe31fde0251ceacd029b49f792d0d9230b453094f630d7d563d8 Nov 21 16:10:03 crc kubenswrapper[4774]: I1121 16:10:03.362470 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-d4tnk" event={"ID":"c084e8b3-667e-4b63-b370-a667a4b0cda6","Type":"ContainerStarted","Data":"8c7b85d30420fe31fde0251ceacd029b49f792d0d9230b453094f630d7d563d8"} Nov 21 16:10:04 crc kubenswrapper[4774]: I1121 16:10:04.416737 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-d4tnk" event={"ID":"c084e8b3-667e-4b63-b370-a667a4b0cda6","Type":"ContainerStarted","Data":"e27bf33adde1dfcfdff65ab59d3227c48e5a3b4f7820e4ec98f930bab0d60057"} Nov 21 16:10:04 crc kubenswrapper[4774]: I1121 16:10:04.451663 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-openstack-openstack-cell1-d4tnk" podStartSLOduration=2.4692736809999998 podStartE2EDuration="3.451641258s" podCreationTimestamp="2025-11-21 16:10:01 +0000 UTC" firstStartedPulling="2025-11-21 16:10:02.382584366 +0000 UTC m=+7593.034783625" lastFinishedPulling="2025-11-21 16:10:03.364951943 +0000 UTC m=+7594.017151202" observedRunningTime="2025-11-21 16:10:04.4366475 +0000 UTC m=+7595.088846759" watchObservedRunningTime="2025-11-21 16:10:04.451641258 +0000 UTC m=+7595.103840527" Nov 21 16:10:08 crc kubenswrapper[4774]: I1121 16:10:08.097287 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:10:08 crc kubenswrapper[4774]: I1121 16:10:08.489468 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"f694dbded2ebe51cf76565e124d2918bd57d74fbf4e28477972783a450f186b5"} Nov 21 16:10:12 crc kubenswrapper[4774]: I1121 16:10:12.533042 4774 generic.go:334] "Generic (PLEG): container finished" podID="c084e8b3-667e-4b63-b370-a667a4b0cda6" containerID="e27bf33adde1dfcfdff65ab59d3227c48e5a3b4f7820e4ec98f930bab0d60057" exitCode=0 Nov 21 16:10:12 crc kubenswrapper[4774]: I1121 16:10:12.533162 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-d4tnk" event={"ID":"c084e8b3-667e-4b63-b370-a667a4b0cda6","Type":"ContainerDied","Data":"e27bf33adde1dfcfdff65ab59d3227c48e5a3b4f7820e4ec98f930bab0d60057"} Nov 21 16:10:13 crc kubenswrapper[4774]: I1121 16:10:13.968637 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.001341 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-ssh-key\") pod \"c084e8b3-667e-4b63-b370-a667a4b0cda6\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.001468 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-ceph\") pod \"c084e8b3-667e-4b63-b370-a667a4b0cda6\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.001555 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-inventory\") pod \"c084e8b3-667e-4b63-b370-a667a4b0cda6\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.001644 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7lwn\" (UniqueName: \"kubernetes.io/projected/c084e8b3-667e-4b63-b370-a667a4b0cda6-kube-api-access-k7lwn\") pod \"c084e8b3-667e-4b63-b370-a667a4b0cda6\" (UID: \"c084e8b3-667e-4b63-b370-a667a4b0cda6\") " Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.010176 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-ceph" (OuterVolumeSpecName: "ceph") pod "c084e8b3-667e-4b63-b370-a667a4b0cda6" (UID: "c084e8b3-667e-4b63-b370-a667a4b0cda6"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.010286 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c084e8b3-667e-4b63-b370-a667a4b0cda6-kube-api-access-k7lwn" (OuterVolumeSpecName: "kube-api-access-k7lwn") pod "c084e8b3-667e-4b63-b370-a667a4b0cda6" (UID: "c084e8b3-667e-4b63-b370-a667a4b0cda6"). InnerVolumeSpecName "kube-api-access-k7lwn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.034216 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-inventory" (OuterVolumeSpecName: "inventory") pod "c084e8b3-667e-4b63-b370-a667a4b0cda6" (UID: "c084e8b3-667e-4b63-b370-a667a4b0cda6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.042862 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c084e8b3-667e-4b63-b370-a667a4b0cda6" (UID: "c084e8b3-667e-4b63-b370-a667a4b0cda6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.102963 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.102995 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.103007 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7lwn\" (UniqueName: \"kubernetes.io/projected/c084e8b3-667e-4b63-b370-a667a4b0cda6-kube-api-access-k7lwn\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.103015 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c084e8b3-667e-4b63-b370-a667a4b0cda6-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.554122 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-openstack-openstack-cell1-d4tnk" event={"ID":"c084e8b3-667e-4b63-b370-a667a4b0cda6","Type":"ContainerDied","Data":"8c7b85d30420fe31fde0251ceacd029b49f792d0d9230b453094f630d7d563d8"} Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.554441 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c7b85d30420fe31fde0251ceacd029b49f792d0d9230b453094f630d7d563d8" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.554164 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-openstack-openstack-cell1-d4tnk" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.610785 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-tnwgq"] Nov 21 16:10:14 crc kubenswrapper[4774]: E1121 16:10:14.611323 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c084e8b3-667e-4b63-b370-a667a4b0cda6" containerName="run-os-openstack-openstack-cell1" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.611341 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c084e8b3-667e-4b63-b370-a667a4b0cda6" containerName="run-os-openstack-openstack-cell1" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.611645 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="c084e8b3-667e-4b63-b370-a667a4b0cda6" containerName="run-os-openstack-openstack-cell1" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.612530 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.614351 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.614534 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.615067 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.616123 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.620287 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-tnwgq"] Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.815760 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-ceph\") pod \"reboot-os-openstack-openstack-cell1-tnwgq\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.815836 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-tnwgq\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.816624 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl2hp\" (UniqueName: \"kubernetes.io/projected/98752e83-384a-4828-b900-1b8b62522ece-kube-api-access-nl2hp\") pod \"reboot-os-openstack-openstack-cell1-tnwgq\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.816762 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-inventory\") pod \"reboot-os-openstack-openstack-cell1-tnwgq\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.918994 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-ceph\") pod \"reboot-os-openstack-openstack-cell1-tnwgq\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.919073 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-tnwgq\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.919120 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-nl2hp\" (UniqueName: \"kubernetes.io/projected/98752e83-384a-4828-b900-1b8b62522ece-kube-api-access-nl2hp\") pod \"reboot-os-openstack-openstack-cell1-tnwgq\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.919191 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-inventory\") pod \"reboot-os-openstack-openstack-cell1-tnwgq\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.923485 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-ssh-key\") pod \"reboot-os-openstack-openstack-cell1-tnwgq\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.924079 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-ceph\") pod \"reboot-os-openstack-openstack-cell1-tnwgq\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.924253 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-inventory\") pod \"reboot-os-openstack-openstack-cell1-tnwgq\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:14 crc kubenswrapper[4774]: I1121 16:10:14.934372 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl2hp\" (UniqueName: \"kubernetes.io/projected/98752e83-384a-4828-b900-1b8b62522ece-kube-api-access-nl2hp\") pod \"reboot-os-openstack-openstack-cell1-tnwgq\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:15 crc kubenswrapper[4774]: I1121 16:10:15.234432 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:15 crc kubenswrapper[4774]: I1121 16:10:15.773529 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-openstack-openstack-cell1-tnwgq"] Nov 21 16:10:16 crc kubenswrapper[4774]: I1121 16:10:16.575191 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" event={"ID":"98752e83-384a-4828-b900-1b8b62522ece","Type":"ContainerStarted","Data":"5632468ace31860af67f2d4cecc887a11ba2a2b54eb17a6fbe38edcd272a6acf"} Nov 21 16:10:17 crc kubenswrapper[4774]: I1121 16:10:17.585032 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" event={"ID":"98752e83-384a-4828-b900-1b8b62522ece","Type":"ContainerStarted","Data":"c82171bebb7a9ba263907e6f2464a5aaff80d1cf212136b2dccdea0a6543d1db"} Nov 21 16:10:17 crc kubenswrapper[4774]: I1121 16:10:17.602159 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" podStartSLOduration=2.8616001620000002 podStartE2EDuration="3.602139451s" podCreationTimestamp="2025-11-21 16:10:14 +0000 UTC" firstStartedPulling="2025-11-21 16:10:15.767953888 +0000 UTC m=+7606.420153147" lastFinishedPulling="2025-11-21 16:10:16.508493177 +0000 UTC m=+7607.160692436" observedRunningTime="2025-11-21 16:10:17.597930251 +0000 UTC m=+7608.250129510" watchObservedRunningTime="2025-11-21 16:10:17.602139451 +0000 UTC m=+7608.254338710" Nov 21 16:10:32 crc kubenswrapper[4774]: I1121 16:10:32.737844 4774 generic.go:334] "Generic (PLEG): container finished" podID="98752e83-384a-4828-b900-1b8b62522ece" containerID="c82171bebb7a9ba263907e6f2464a5aaff80d1cf212136b2dccdea0a6543d1db" exitCode=0 Nov 21 16:10:32 crc kubenswrapper[4774]: I1121 16:10:32.737940 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" event={"ID":"98752e83-384a-4828-b900-1b8b62522ece","Type":"ContainerDied","Data":"c82171bebb7a9ba263907e6f2464a5aaff80d1cf212136b2dccdea0a6543d1db"} Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.241185 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.325661 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-inventory\") pod \"98752e83-384a-4828-b900-1b8b62522ece\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.326000 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nl2hp\" (UniqueName: \"kubernetes.io/projected/98752e83-384a-4828-b900-1b8b62522ece-kube-api-access-nl2hp\") pod \"98752e83-384a-4828-b900-1b8b62522ece\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.326077 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-ceph\") pod \"98752e83-384a-4828-b900-1b8b62522ece\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.326249 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-ssh-key\") pod \"98752e83-384a-4828-b900-1b8b62522ece\" (UID: \"98752e83-384a-4828-b900-1b8b62522ece\") " Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.331933 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98752e83-384a-4828-b900-1b8b62522ece-kube-api-access-nl2hp" (OuterVolumeSpecName: "kube-api-access-nl2hp") pod "98752e83-384a-4828-b900-1b8b62522ece" (UID: "98752e83-384a-4828-b900-1b8b62522ece"). InnerVolumeSpecName "kube-api-access-nl2hp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.333907 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-ceph" (OuterVolumeSpecName: "ceph") pod "98752e83-384a-4828-b900-1b8b62522ece" (UID: "98752e83-384a-4828-b900-1b8b62522ece"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.374050 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-inventory" (OuterVolumeSpecName: "inventory") pod "98752e83-384a-4828-b900-1b8b62522ece" (UID: "98752e83-384a-4828-b900-1b8b62522ece"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.377925 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "98752e83-384a-4828-b900-1b8b62522ece" (UID: "98752e83-384a-4828-b900-1b8b62522ece"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.428897 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.428950 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.428966 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nl2hp\" (UniqueName: \"kubernetes.io/projected/98752e83-384a-4828-b900-1b8b62522ece-kube-api-access-nl2hp\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.428980 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/98752e83-384a-4828-b900-1b8b62522ece-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.766120 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" event={"ID":"98752e83-384a-4828-b900-1b8b62522ece","Type":"ContainerDied","Data":"5632468ace31860af67f2d4cecc887a11ba2a2b54eb17a6fbe38edcd272a6acf"} Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.766425 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5632468ace31860af67f2d4cecc887a11ba2a2b54eb17a6fbe38edcd272a6acf" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.766721 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-openstack-openstack-cell1-tnwgq" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.865182 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-t8cr4"] Nov 21 16:10:34 crc kubenswrapper[4774]: E1121 16:10:34.866199 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98752e83-384a-4828-b900-1b8b62522ece" containerName="reboot-os-openstack-openstack-cell1" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.866226 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="98752e83-384a-4828-b900-1b8b62522ece" containerName="reboot-os-openstack-openstack-cell1" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.866589 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="98752e83-384a-4828-b900-1b8b62522ece" containerName="reboot-os-openstack-openstack-cell1" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.867763 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.870626 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.870623 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.871085 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.878219 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.880685 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-t8cr4"] Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.938798 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.938874 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ssh-key\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.938913 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k8bb8\" (UniqueName: \"kubernetes.io/projected/ee250856-b49e-4620-9067-bd30f9324f0b-kube-api-access-k8bb8\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.939000 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.939024 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.939049 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.939071 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.939089 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.939106 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-inventory\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.939184 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ceph\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.939205 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:34 crc kubenswrapper[4774]: I1121 16:10:34.939234 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.040760 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.040830 4774 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.040897 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.040926 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.040952 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.040978 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-inventory\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.041032 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ceph\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.041060 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.041099 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.041159 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.041198 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ssh-key\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.041236 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k8bb8\" (UniqueName: \"kubernetes.io/projected/ee250856-b49e-4620-9067-bd30f9324f0b-kube-api-access-k8bb8\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.045296 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.045425 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-nova-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.045590 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ceph\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.046843 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-libvirt-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.048100 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ssh-key\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.048966 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-inventory\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " 
pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.049476 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ovn-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.049507 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-sriov-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.050258 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.051644 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-bootstrap-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.056116 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-telemetry-combined-ca-bundle\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.062051 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k8bb8\" (UniqueName: \"kubernetes.io/projected/ee250856-b49e-4620-9067-bd30f9324f0b-kube-api-access-k8bb8\") pod \"install-certs-openstack-openstack-cell1-t8cr4\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.198064 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.765597 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-openstack-openstack-cell1-t8cr4"] Nov 21 16:10:35 crc kubenswrapper[4774]: I1121 16:10:35.781211 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" event={"ID":"ee250856-b49e-4620-9067-bd30f9324f0b","Type":"ContainerStarted","Data":"a008e85d5eab1e0304a07a2e47612eb2aa1076b4c08b5d5405071c23e5b82543"} Nov 21 16:10:36 crc kubenswrapper[4774]: I1121 16:10:36.798904 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" event={"ID":"ee250856-b49e-4620-9067-bd30f9324f0b","Type":"ContainerStarted","Data":"31ce717332874c68621f1b62fae7271d0e95fced643dbf01d0116dcd89addc66"} Nov 21 16:10:36 crc kubenswrapper[4774]: I1121 16:10:36.817478 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" podStartSLOduration=2.375927543 podStartE2EDuration="2.817457283s" podCreationTimestamp="2025-11-21 16:10:34 +0000 UTC" firstStartedPulling="2025-11-21 16:10:35.764201143 +0000 UTC m=+7626.416400402" lastFinishedPulling="2025-11-21 16:10:36.205730883 +0000 UTC m=+7626.857930142" observedRunningTime="2025-11-21 16:10:36.816404203 +0000 UTC m=+7627.468603462" watchObservedRunningTime="2025-11-21 16:10:36.817457283 +0000 UTC m=+7627.469656542" Nov 21 16:10:54 crc kubenswrapper[4774]: I1121 16:10:54.975086 4774 generic.go:334] "Generic (PLEG): container finished" podID="ee250856-b49e-4620-9067-bd30f9324f0b" containerID="31ce717332874c68621f1b62fae7271d0e95fced643dbf01d0116dcd89addc66" exitCode=0 Nov 21 16:10:54 crc kubenswrapper[4774]: I1121 16:10:54.975355 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" event={"ID":"ee250856-b49e-4620-9067-bd30f9324f0b","Type":"ContainerDied","Data":"31ce717332874c68621f1b62fae7271d0e95fced643dbf01d0116dcd89addc66"} Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.483399 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.591036 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-telemetry-combined-ca-bundle\") pod \"ee250856-b49e-4620-9067-bd30f9324f0b\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.591084 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k8bb8\" (UniqueName: \"kubernetes.io/projected/ee250856-b49e-4620-9067-bd30f9324f0b-kube-api-access-k8bb8\") pod \"ee250856-b49e-4620-9067-bd30f9324f0b\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.591123 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-libvirt-combined-ca-bundle\") pod \"ee250856-b49e-4620-9067-bd30f9324f0b\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.591168 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-nova-combined-ca-bundle\") pod \"ee250856-b49e-4620-9067-bd30f9324f0b\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.591209 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-bootstrap-combined-ca-bundle\") pod \"ee250856-b49e-4620-9067-bd30f9324f0b\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.591265 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-dhcp-combined-ca-bundle\") pod \"ee250856-b49e-4620-9067-bd30f9324f0b\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.591315 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ceph\") pod \"ee250856-b49e-4620-9067-bd30f9324f0b\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.591334 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-sriov-combined-ca-bundle\") pod \"ee250856-b49e-4620-9067-bd30f9324f0b\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.591352 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-inventory\") pod \"ee250856-b49e-4620-9067-bd30f9324f0b\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.591384 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ssh-key\") pod \"ee250856-b49e-4620-9067-bd30f9324f0b\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.591423 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-metadata-combined-ca-bundle\") pod \"ee250856-b49e-4620-9067-bd30f9324f0b\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.591440 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ovn-combined-ca-bundle\") pod \"ee250856-b49e-4620-9067-bd30f9324f0b\" (UID: \"ee250856-b49e-4620-9067-bd30f9324f0b\") " Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.598795 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "ee250856-b49e-4620-9067-bd30f9324f0b" (UID: "ee250856-b49e-4620-9067-bd30f9324f0b"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.598842 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "ee250856-b49e-4620-9067-bd30f9324f0b" (UID: "ee250856-b49e-4620-9067-bd30f9324f0b"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.598868 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ee250856-b49e-4620-9067-bd30f9324f0b" (UID: "ee250856-b49e-4620-9067-bd30f9324f0b"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.598953 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "ee250856-b49e-4620-9067-bd30f9324f0b" (UID: "ee250856-b49e-4620-9067-bd30f9324f0b"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.599252 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ceph" (OuterVolumeSpecName: "ceph") pod "ee250856-b49e-4620-9067-bd30f9324f0b" (UID: "ee250856-b49e-4620-9067-bd30f9324f0b"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.599269 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "ee250856-b49e-4620-9067-bd30f9324f0b" (UID: "ee250856-b49e-4620-9067-bd30f9324f0b"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.599404 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee250856-b49e-4620-9067-bd30f9324f0b-kube-api-access-k8bb8" (OuterVolumeSpecName: "kube-api-access-k8bb8") pod "ee250856-b49e-4620-9067-bd30f9324f0b" (UID: "ee250856-b49e-4620-9067-bd30f9324f0b"). InnerVolumeSpecName "kube-api-access-k8bb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.599649 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "ee250856-b49e-4620-9067-bd30f9324f0b" (UID: "ee250856-b49e-4620-9067-bd30f9324f0b"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.601776 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "ee250856-b49e-4620-9067-bd30f9324f0b" (UID: "ee250856-b49e-4620-9067-bd30f9324f0b"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.602363 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "ee250856-b49e-4620-9067-bd30f9324f0b" (UID: "ee250856-b49e-4620-9067-bd30f9324f0b"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.624397 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-inventory" (OuterVolumeSpecName: "inventory") pod "ee250856-b49e-4620-9067-bd30f9324f0b" (UID: "ee250856-b49e-4620-9067-bd30f9324f0b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.625431 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ee250856-b49e-4620-9067-bd30f9324f0b" (UID: "ee250856-b49e-4620-9067-bd30f9324f0b"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.693751 4774 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.693793 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.693808 4774 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.693839 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.693850 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.693865 4774 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.693879 4774 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.693894 4774 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.693905 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k8bb8\" (UniqueName: \"kubernetes.io/projected/ee250856-b49e-4620-9067-bd30f9324f0b-kube-api-access-k8bb8\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.693916 4774 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.693926 4774 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:56 crc kubenswrapper[4774]: I1121 16:10:56.693938 4774 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee250856-b49e-4620-9067-bd30f9324f0b-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.001757 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" event={"ID":"ee250856-b49e-4620-9067-bd30f9324f0b","Type":"ContainerDied","Data":"a008e85d5eab1e0304a07a2e47612eb2aa1076b4c08b5d5405071c23e5b82543"} Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.001808 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a008e85d5eab1e0304a07a2e47612eb2aa1076b4c08b5d5405071c23e5b82543" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.002190 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-openstack-openstack-cell1-t8cr4" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.089296 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-vtdwk"] Nov 21 16:10:57 crc kubenswrapper[4774]: E1121 16:10:57.090143 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee250856-b49e-4620-9067-bd30f9324f0b" containerName="install-certs-openstack-openstack-cell1" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.090234 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee250856-b49e-4620-9067-bd30f9324f0b" containerName="install-certs-openstack-openstack-cell1" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.090545 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee250856-b49e-4620-9067-bd30f9324f0b" containerName="install-certs-openstack-openstack-cell1" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.092289 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.095327 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.095682 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.099050 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.100175 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.121195 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-vtdwk"] Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.204203 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-ceph\") pod \"ceph-client-openstack-openstack-cell1-vtdwk\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.204296 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-vtdwk\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.204380 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-inventory\") pod \"ceph-client-openstack-openstack-cell1-vtdwk\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.204413 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt8ws\" (UniqueName: \"kubernetes.io/projected/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-kube-api-access-mt8ws\") pod \"ceph-client-openstack-openstack-cell1-vtdwk\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.306564 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-vtdwk\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.306718 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-inventory\") pod \"ceph-client-openstack-openstack-cell1-vtdwk\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.306754 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt8ws\" (UniqueName: \"kubernetes.io/projected/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-kube-api-access-mt8ws\") pod \"ceph-client-openstack-openstack-cell1-vtdwk\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.306876 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-ceph\") pod \"ceph-client-openstack-openstack-cell1-vtdwk\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.311706 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-ssh-key\") pod \"ceph-client-openstack-openstack-cell1-vtdwk\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.311733 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-ceph\") pod \"ceph-client-openstack-openstack-cell1-vtdwk\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.320680 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-inventory\") pod \"ceph-client-openstack-openstack-cell1-vtdwk\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 
crc kubenswrapper[4774]: I1121 16:10:57.327810 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt8ws\" (UniqueName: \"kubernetes.io/projected/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-kube-api-access-mt8ws\") pod \"ceph-client-openstack-openstack-cell1-vtdwk\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.419566 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:10:57 crc kubenswrapper[4774]: I1121 16:10:57.984367 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-client-openstack-openstack-cell1-vtdwk"] Nov 21 16:10:58 crc kubenswrapper[4774]: I1121 16:10:58.024314 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" event={"ID":"63fc2b59-5dcc-4b9f-a8c0-0877f1490778","Type":"ContainerStarted","Data":"cb382c17124e5a6a50a7e8555758db20eb215720b3d29e6c0f06afb3925242aa"} Nov 21 16:11:00 crc kubenswrapper[4774]: I1121 16:11:00.043465 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" event={"ID":"63fc2b59-5dcc-4b9f-a8c0-0877f1490778","Type":"ContainerStarted","Data":"81920dc4f94fedf51bc8ad9dd771480ae88ab25ddc2fd4fe2372838f97651498"} Nov 21 16:11:00 crc kubenswrapper[4774]: I1121 16:11:00.079910 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" podStartSLOduration=2.028079631 podStartE2EDuration="3.079891579s" podCreationTimestamp="2025-11-21 16:10:57 +0000 UTC" firstStartedPulling="2025-11-21 16:10:57.98954673 +0000 UTC m=+7648.641745989" lastFinishedPulling="2025-11-21 16:10:59.041358658 +0000 UTC m=+7649.693557937" observedRunningTime="2025-11-21 16:11:00.073298971 +0000 UTC m=+7650.725498230" watchObservedRunningTime="2025-11-21 16:11:00.079891579 +0000 UTC m=+7650.732090828" Nov 21 16:11:04 crc kubenswrapper[4774]: I1121 16:11:04.082685 4774 generic.go:334] "Generic (PLEG): container finished" podID="63fc2b59-5dcc-4b9f-a8c0-0877f1490778" containerID="81920dc4f94fedf51bc8ad9dd771480ae88ab25ddc2fd4fe2372838f97651498" exitCode=0 Nov 21 16:11:04 crc kubenswrapper[4774]: I1121 16:11:04.082772 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" event={"ID":"63fc2b59-5dcc-4b9f-a8c0-0877f1490778","Type":"ContainerDied","Data":"81920dc4f94fedf51bc8ad9dd771480ae88ab25ddc2fd4fe2372838f97651498"} Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.526363 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.677002 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-inventory\") pod \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.677398 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mt8ws\" (UniqueName: \"kubernetes.io/projected/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-kube-api-access-mt8ws\") pod \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.677686 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-ceph\") pod \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.677810 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-ssh-key\") pod \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\" (UID: \"63fc2b59-5dcc-4b9f-a8c0-0877f1490778\") " Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.699459 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-kube-api-access-mt8ws" (OuterVolumeSpecName: "kube-api-access-mt8ws") pod "63fc2b59-5dcc-4b9f-a8c0-0877f1490778" (UID: "63fc2b59-5dcc-4b9f-a8c0-0877f1490778"). InnerVolumeSpecName "kube-api-access-mt8ws". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.717974 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-ceph" (OuterVolumeSpecName: "ceph") pod "63fc2b59-5dcc-4b9f-a8c0-0877f1490778" (UID: "63fc2b59-5dcc-4b9f-a8c0-0877f1490778"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.739767 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-inventory" (OuterVolumeSpecName: "inventory") pod "63fc2b59-5dcc-4b9f-a8c0-0877f1490778" (UID: "63fc2b59-5dcc-4b9f-a8c0-0877f1490778"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.739893 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "63fc2b59-5dcc-4b9f-a8c0-0877f1490778" (UID: "63fc2b59-5dcc-4b9f-a8c0-0877f1490778"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.780741 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.780771 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mt8ws\" (UniqueName: \"kubernetes.io/projected/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-kube-api-access-mt8ws\") on node \"crc\" DevicePath \"\"" Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.780782 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:11:05 crc kubenswrapper[4774]: I1121 16:11:05.780791 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63fc2b59-5dcc-4b9f-a8c0-0877f1490778-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.110133 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.112215 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-client-openstack-openstack-cell1-vtdwk" event={"ID":"63fc2b59-5dcc-4b9f-a8c0-0877f1490778","Type":"ContainerDied","Data":"cb382c17124e5a6a50a7e8555758db20eb215720b3d29e6c0f06afb3925242aa"} Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.112256 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb382c17124e5a6a50a7e8555758db20eb215720b3d29e6c0f06afb3925242aa" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.193373 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-openstack-openstack-cell1-nz7qj"] Nov 21 16:11:06 crc kubenswrapper[4774]: E1121 16:11:06.193954 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63fc2b59-5dcc-4b9f-a8c0-0877f1490778" containerName="ceph-client-openstack-openstack-cell1" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.193977 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="63fc2b59-5dcc-4b9f-a8c0-0877f1490778" containerName="ceph-client-openstack-openstack-cell1" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.194242 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="63fc2b59-5dcc-4b9f-a8c0-0877f1490778" containerName="ceph-client-openstack-openstack-cell1" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.195197 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.198158 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.199357 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.199375 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.199545 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.206136 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.215589 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-nz7qj"] Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.292059 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/687eea4a-3909-493e-a5a6-74f84cd247b1-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.292455 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ssh-key\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.292546 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.292666 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zft8h\" (UniqueName: \"kubernetes.io/projected/687eea4a-3909-493e-a5a6-74f84cd247b1-kube-api-access-zft8h\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.292739 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-inventory\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.292808 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ceph\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: 
\"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.394692 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zft8h\" (UniqueName: \"kubernetes.io/projected/687eea4a-3909-493e-a5a6-74f84cd247b1-kube-api-access-zft8h\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.394750 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-inventory\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.394804 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ceph\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.394898 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/687eea4a-3909-493e-a5a6-74f84cd247b1-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.394956 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ssh-key\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.395035 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ovn-combined-ca-bundle\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.395888 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/687eea4a-3909-493e-a5a6-74f84cd247b1-ovncontroller-config-0\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.398943 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ceph\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.399400 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ovn-combined-ca-bundle\") pod 
\"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.399517 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ssh-key\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.402295 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-inventory\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.415363 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zft8h\" (UniqueName: \"kubernetes.io/projected/687eea4a-3909-493e-a5a6-74f84cd247b1-kube-api-access-zft8h\") pod \"ovn-openstack-openstack-cell1-nz7qj\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:06 crc kubenswrapper[4774]: I1121 16:11:06.514477 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:11:07 crc kubenswrapper[4774]: I1121 16:11:07.082559 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-openstack-openstack-cell1-nz7qj"] Nov 21 16:11:07 crc kubenswrapper[4774]: I1121 16:11:07.120065 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-nz7qj" event={"ID":"687eea4a-3909-493e-a5a6-74f84cd247b1","Type":"ContainerStarted","Data":"e36344f723804c1b14e6e294c1883728699b0c020544abbc2dcc543fa6aa0078"} Nov 21 16:11:09 crc kubenswrapper[4774]: I1121 16:11:09.138449 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-nz7qj" event={"ID":"687eea4a-3909-493e-a5a6-74f84cd247b1","Type":"ContainerStarted","Data":"f962d090af80c1931ea39d651f6a9bd571f0ec2a10da4a63262c09d3386cd3ef"} Nov 21 16:11:09 crc kubenswrapper[4774]: I1121 16:11:09.156335 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-openstack-openstack-cell1-nz7qj" podStartSLOduration=2.093971619 podStartE2EDuration="3.156316299s" podCreationTimestamp="2025-11-21 16:11:06 +0000 UTC" firstStartedPulling="2025-11-21 16:11:07.093549327 +0000 UTC m=+7657.745748606" lastFinishedPulling="2025-11-21 16:11:08.155894027 +0000 UTC m=+7658.808093286" observedRunningTime="2025-11-21 16:11:09.154217499 +0000 UTC m=+7659.806416778" watchObservedRunningTime="2025-11-21 16:11:09.156316299 +0000 UTC m=+7659.808515558" Nov 21 16:12:12 crc kubenswrapper[4774]: I1121 16:12:12.799067 4774 generic.go:334] "Generic (PLEG): container finished" podID="687eea4a-3909-493e-a5a6-74f84cd247b1" containerID="f962d090af80c1931ea39d651f6a9bd571f0ec2a10da4a63262c09d3386cd3ef" exitCode=0 Nov 21 16:12:12 crc kubenswrapper[4774]: I1121 16:12:12.799287 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-nz7qj" event={"ID":"687eea4a-3909-493e-a5a6-74f84cd247b1","Type":"ContainerDied","Data":"f962d090af80c1931ea39d651f6a9bd571f0ec2a10da4a63262c09d3386cd3ef"} Nov 21 16:12:14 crc 
kubenswrapper[4774]: I1121 16:12:14.252235 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.368129 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ovn-combined-ca-bundle\") pod \"687eea4a-3909-493e-a5a6-74f84cd247b1\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.368301 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zft8h\" (UniqueName: \"kubernetes.io/projected/687eea4a-3909-493e-a5a6-74f84cd247b1-kube-api-access-zft8h\") pod \"687eea4a-3909-493e-a5a6-74f84cd247b1\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.368352 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ssh-key\") pod \"687eea4a-3909-493e-a5a6-74f84cd247b1\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.368484 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-inventory\") pod \"687eea4a-3909-493e-a5a6-74f84cd247b1\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.368539 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ceph\") pod \"687eea4a-3909-493e-a5a6-74f84cd247b1\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.368575 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/687eea4a-3909-493e-a5a6-74f84cd247b1-ovncontroller-config-0\") pod \"687eea4a-3909-493e-a5a6-74f84cd247b1\" (UID: \"687eea4a-3909-493e-a5a6-74f84cd247b1\") " Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.375557 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/687eea4a-3909-493e-a5a6-74f84cd247b1-kube-api-access-zft8h" (OuterVolumeSpecName: "kube-api-access-zft8h") pod "687eea4a-3909-493e-a5a6-74f84cd247b1" (UID: "687eea4a-3909-493e-a5a6-74f84cd247b1"). InnerVolumeSpecName "kube-api-access-zft8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.383950 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "687eea4a-3909-493e-a5a6-74f84cd247b1" (UID: "687eea4a-3909-493e-a5a6-74f84cd247b1"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.384011 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ceph" (OuterVolumeSpecName: "ceph") pod "687eea4a-3909-493e-a5a6-74f84cd247b1" (UID: "687eea4a-3909-493e-a5a6-74f84cd247b1"). 
InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.395784 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/687eea4a-3909-493e-a5a6-74f84cd247b1-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "687eea4a-3909-493e-a5a6-74f84cd247b1" (UID: "687eea4a-3909-493e-a5a6-74f84cd247b1"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.399320 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "687eea4a-3909-493e-a5a6-74f84cd247b1" (UID: "687eea4a-3909-493e-a5a6-74f84cd247b1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.408896 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-inventory" (OuterVolumeSpecName: "inventory") pod "687eea4a-3909-493e-a5a6-74f84cd247b1" (UID: "687eea4a-3909-493e-a5a6-74f84cd247b1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.470626 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zft8h\" (UniqueName: \"kubernetes.io/projected/687eea4a-3909-493e-a5a6-74f84cd247b1-kube-api-access-zft8h\") on node \"crc\" DevicePath \"\"" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.470656 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.470669 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.470681 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.470696 4774 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/687eea4a-3909-493e-a5a6-74f84cd247b1-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.470727 4774 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/687eea4a-3909-493e-a5a6-74f84cd247b1-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.826267 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-openstack-openstack-cell1-nz7qj" event={"ID":"687eea4a-3909-493e-a5a6-74f84cd247b1","Type":"ContainerDied","Data":"e36344f723804c1b14e6e294c1883728699b0c020544abbc2dcc543fa6aa0078"} Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.826312 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e36344f723804c1b14e6e294c1883728699b0c020544abbc2dcc543fa6aa0078" Nov 21 16:12:14 crc 
kubenswrapper[4774]: I1121 16:12:14.826339 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-openstack-openstack-cell1-nz7qj" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.927467 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-s78c9"] Nov 21 16:12:14 crc kubenswrapper[4774]: E1121 16:12:14.927994 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="687eea4a-3909-493e-a5a6-74f84cd247b1" containerName="ovn-openstack-openstack-cell1" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.928018 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="687eea4a-3909-493e-a5a6-74f84cd247b1" containerName="ovn-openstack-openstack-cell1" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.928277 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="687eea4a-3909-493e-a5a6-74f84cd247b1" containerName="ovn-openstack-openstack-cell1" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.929111 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.932732 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.932732 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.932769 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.932801 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.933877 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.951634 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.965545 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-s78c9"] Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.981758 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.981888 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.981966 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.981999 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.982089 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.982728 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4xcl\" (UniqueName: \"kubernetes.io/projected/4fd555c3-cec8-4965-8851-4fbe8106ec02-kube-api-access-h4xcl\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:14 crc kubenswrapper[4774]: I1121 16:12:14.982788 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.084105 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4xcl\" (UniqueName: \"kubernetes.io/projected/4fd555c3-cec8-4965-8851-4fbe8106ec02-kube-api-access-h4xcl\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.084414 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.084458 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.084519 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.084560 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.084582 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.084639 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.089495 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-inventory\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.089777 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-ssh-key\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.090416 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.090570 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-ceph\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.090955 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.091716 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-nova-metadata-neutron-config-0\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.102886 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4xcl\" (UniqueName: \"kubernetes.io/projected/4fd555c3-cec8-4965-8851-4fbe8106ec02-kube-api-access-h4xcl\") pod \"neutron-metadata-openstack-openstack-cell1-s78c9\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.274949 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.805273 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-openstack-openstack-cell1-s78c9"] Nov 21 16:12:15 crc kubenswrapper[4774]: W1121 16:12:15.815490 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fd555c3_cec8_4965_8851_4fbe8106ec02.slice/crio-f77f5a2c5a50a63fcfc79a577147b60d5ded789bed3e8ec865eea4d09aea1206 WatchSource:0}: Error finding container f77f5a2c5a50a63fcfc79a577147b60d5ded789bed3e8ec865eea4d09aea1206: Status 404 returned error can't find the container with id f77f5a2c5a50a63fcfc79a577147b60d5ded789bed3e8ec865eea4d09aea1206 Nov 21 16:12:15 crc kubenswrapper[4774]: I1121 16:12:15.838233 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" event={"ID":"4fd555c3-cec8-4965-8851-4fbe8106ec02","Type":"ContainerStarted","Data":"f77f5a2c5a50a63fcfc79a577147b60d5ded789bed3e8ec865eea4d09aea1206"} Nov 21 16:12:16 crc kubenswrapper[4774]: I1121 16:12:16.850316 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" event={"ID":"4fd555c3-cec8-4965-8851-4fbe8106ec02","Type":"ContainerStarted","Data":"6ec4bec101ddcf87b876dcd8e3f4dda17ea60582c0d4d43fef9537974b48355f"} Nov 21 16:12:16 crc kubenswrapper[4774]: I1121 16:12:16.868702 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" podStartSLOduration=2.18180389 podStartE2EDuration="2.868677157s" podCreationTimestamp="2025-11-21 16:12:14 +0000 UTC" firstStartedPulling="2025-11-21 16:12:15.817864076 +0000 UTC m=+7726.470063335" lastFinishedPulling="2025-11-21 16:12:16.504737343 +0000 UTC m=+7727.156936602" observedRunningTime="2025-11-21 16:12:16.865797415 +0000 UTC m=+7727.517996684" watchObservedRunningTime="2025-11-21 16:12:16.868677157 +0000 UTC m=+7727.520876416" Nov 21 16:12:29 crc kubenswrapper[4774]: I1121 16:12:29.601331 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:12:29 crc kubenswrapper[4774]: I1121 16:12:29.601934 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:12:59 crc kubenswrapper[4774]: I1121 16:12:59.600588 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:12:59 crc kubenswrapper[4774]: I1121 16:12:59.601118 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:13:10 crc kubenswrapper[4774]: I1121 16:13:10.399606 4774 generic.go:334] "Generic (PLEG): container finished" podID="4fd555c3-cec8-4965-8851-4fbe8106ec02" containerID="6ec4bec101ddcf87b876dcd8e3f4dda17ea60582c0d4d43fef9537974b48355f" exitCode=0 Nov 21 16:13:10 crc kubenswrapper[4774]: I1121 16:13:10.399708 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" event={"ID":"4fd555c3-cec8-4965-8851-4fbe8106ec02","Type":"ContainerDied","Data":"6ec4bec101ddcf87b876dcd8e3f4dda17ea60582c0d4d43fef9537974b48355f"} Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:11.872657 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:11.970747 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-nova-metadata-neutron-config-0\") pod \"4fd555c3-cec8-4965-8851-4fbe8106ec02\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:11.970892 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-inventory\") pod \"4fd555c3-cec8-4965-8851-4fbe8106ec02\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:11.970918 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-neutron-metadata-combined-ca-bundle\") pod \"4fd555c3-cec8-4965-8851-4fbe8106ec02\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:11.971028 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-ceph\") pod \"4fd555c3-cec8-4965-8851-4fbe8106ec02\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:11.971100 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-ssh-key\") pod \"4fd555c3-cec8-4965-8851-4fbe8106ec02\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:11.971189 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-neutron-ovn-metadata-agent-neutron-config-0\") pod \"4fd555c3-cec8-4965-8851-4fbe8106ec02\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:11.971212 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4xcl\" (UniqueName: \"kubernetes.io/projected/4fd555c3-cec8-4965-8851-4fbe8106ec02-kube-api-access-h4xcl\") pod \"4fd555c3-cec8-4965-8851-4fbe8106ec02\" (UID: \"4fd555c3-cec8-4965-8851-4fbe8106ec02\") " Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:11.976416 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-ceph" (OuterVolumeSpecName: "ceph") pod "4fd555c3-cec8-4965-8851-4fbe8106ec02" (UID: "4fd555c3-cec8-4965-8851-4fbe8106ec02"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:11.976941 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "4fd555c3-cec8-4965-8851-4fbe8106ec02" (UID: "4fd555c3-cec8-4965-8851-4fbe8106ec02"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:11.977009 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fd555c3-cec8-4965-8851-4fbe8106ec02-kube-api-access-h4xcl" (OuterVolumeSpecName: "kube-api-access-h4xcl") pod "4fd555c3-cec8-4965-8851-4fbe8106ec02" (UID: "4fd555c3-cec8-4965-8851-4fbe8106ec02"). InnerVolumeSpecName "kube-api-access-h4xcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.003070 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "4fd555c3-cec8-4965-8851-4fbe8106ec02" (UID: "4fd555c3-cec8-4965-8851-4fbe8106ec02"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.004917 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4fd555c3-cec8-4965-8851-4fbe8106ec02" (UID: "4fd555c3-cec8-4965-8851-4fbe8106ec02"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.005802 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "4fd555c3-cec8-4965-8851-4fbe8106ec02" (UID: "4fd555c3-cec8-4965-8851-4fbe8106ec02"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.011925 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-inventory" (OuterVolumeSpecName: "inventory") pod "4fd555c3-cec8-4965-8851-4fbe8106ec02" (UID: "4fd555c3-cec8-4965-8851-4fbe8106ec02"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.074442 4774 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.074474 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.074487 4774 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.074500 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.074509 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.074519 4774 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/4fd555c3-cec8-4965-8851-4fbe8106ec02-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.074529 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4xcl\" (UniqueName: \"kubernetes.io/projected/4fd555c3-cec8-4965-8851-4fbe8106ec02-kube-api-access-h4xcl\") on node \"crc\" DevicePath \"\"" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.423043 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" event={"ID":"4fd555c3-cec8-4965-8851-4fbe8106ec02","Type":"ContainerDied","Data":"f77f5a2c5a50a63fcfc79a577147b60d5ded789bed3e8ec865eea4d09aea1206"} Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.423392 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f77f5a2c5a50a63fcfc79a577147b60d5ded789bed3e8ec865eea4d09aea1206" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.423136 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-openstack-openstack-cell1-s78c9" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.556637 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-b55tj"] Nov 21 16:13:12 crc kubenswrapper[4774]: E1121 16:13:12.557194 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fd555c3-cec8-4965-8851-4fbe8106ec02" containerName="neutron-metadata-openstack-openstack-cell1" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.557215 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fd555c3-cec8-4965-8851-4fbe8106ec02" containerName="neutron-metadata-openstack-openstack-cell1" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.557494 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fd555c3-cec8-4965-8851-4fbe8106ec02" containerName="neutron-metadata-openstack-openstack-cell1" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.558620 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.561385 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.564659 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.564806 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.565000 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.565127 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.574367 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-b55tj"] Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.686646 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.686770 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-ceph\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.686859 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.686949 4774 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-ssh-key\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.687020 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnjdv\" (UniqueName: \"kubernetes.io/projected/bf4ece17-48c3-4137-9e1a-44d545af4a88-kube-api-access-wnjdv\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.687089 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-inventory\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.789160 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-ssh-key\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.789222 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnjdv\" (UniqueName: \"kubernetes.io/projected/bf4ece17-48c3-4137-9e1a-44d545af4a88-kube-api-access-wnjdv\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.789296 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-inventory\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.789366 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.789421 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-ceph\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.789461 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: 
\"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.793802 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-ssh-key\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.793838 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-ceph\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.794068 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-libvirt-combined-ca-bundle\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.796417 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-libvirt-secret-0\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.797175 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-inventory\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.805942 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnjdv\" (UniqueName: \"kubernetes.io/projected/bf4ece17-48c3-4137-9e1a-44d545af4a88-kube-api-access-wnjdv\") pod \"libvirt-openstack-openstack-cell1-b55tj\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:12 crc kubenswrapper[4774]: I1121 16:13:12.884466 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:13:13 crc kubenswrapper[4774]: I1121 16:13:13.376312 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-openstack-openstack-cell1-b55tj"] Nov 21 16:13:13 crc kubenswrapper[4774]: I1121 16:13:13.383927 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 16:13:13 crc kubenswrapper[4774]: I1121 16:13:13.440081 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-b55tj" event={"ID":"bf4ece17-48c3-4137-9e1a-44d545af4a88","Type":"ContainerStarted","Data":"7e79b5f694fe06fd14e767a3fd4e07379e76bd4c8fb3efaadac31f9a19d72d84"} Nov 21 16:13:14 crc kubenswrapper[4774]: I1121 16:13:14.456619 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-b55tj" event={"ID":"bf4ece17-48c3-4137-9e1a-44d545af4a88","Type":"ContainerStarted","Data":"ba46d03db3b51a9c18e9daac28f40be1dd4386c53672eb617e7ccba1442d7411"} Nov 21 16:13:14 crc kubenswrapper[4774]: I1121 16:13:14.481745 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-openstack-openstack-cell1-b55tj" podStartSLOduration=2.062593683 podStartE2EDuration="2.481720203s" podCreationTimestamp="2025-11-21 16:13:12 +0000 UTC" firstStartedPulling="2025-11-21 16:13:13.383621281 +0000 UTC m=+7784.035820540" lastFinishedPulling="2025-11-21 16:13:13.802747801 +0000 UTC m=+7784.454947060" observedRunningTime="2025-11-21 16:13:14.478330436 +0000 UTC m=+7785.130529695" watchObservedRunningTime="2025-11-21 16:13:14.481720203 +0000 UTC m=+7785.133919462" Nov 21 16:13:29 crc kubenswrapper[4774]: I1121 16:13:29.601225 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:13:29 crc kubenswrapper[4774]: I1121 16:13:29.601894 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:13:29 crc kubenswrapper[4774]: I1121 16:13:29.601958 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 16:13:29 crc kubenswrapper[4774]: I1121 16:13:29.602839 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f694dbded2ebe51cf76565e124d2918bd57d74fbf4e28477972783a450f186b5"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 16:13:29 crc kubenswrapper[4774]: I1121 16:13:29.602905 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://f694dbded2ebe51cf76565e124d2918bd57d74fbf4e28477972783a450f186b5" gracePeriod=600 Nov 21 16:13:30 crc kubenswrapper[4774]: I1121 16:13:30.643200 
4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="f694dbded2ebe51cf76565e124d2918bd57d74fbf4e28477972783a450f186b5" exitCode=0 Nov 21 16:13:30 crc kubenswrapper[4774]: I1121 16:13:30.643281 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"f694dbded2ebe51cf76565e124d2918bd57d74fbf4e28477972783a450f186b5"} Nov 21 16:13:30 crc kubenswrapper[4774]: I1121 16:13:30.643759 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e"} Nov 21 16:13:30 crc kubenswrapper[4774]: I1121 16:13:30.643781 4774 scope.go:117] "RemoveContainer" containerID="563e9193f4be23f59019aec9f1be3f39923b19bf5049fa65a375387d0fcb91a4" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.150849 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8"] Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.153338 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.156010 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.157801 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.162239 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8"] Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.302033 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b6a31625-d371-4566-8356-78138c6ec6aa-secret-volume\") pod \"collect-profiles-29395695-q9vm8\" (UID: \"b6a31625-d371-4566-8356-78138c6ec6aa\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.302111 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzqfp\" (UniqueName: \"kubernetes.io/projected/b6a31625-d371-4566-8356-78138c6ec6aa-kube-api-access-dzqfp\") pod \"collect-profiles-29395695-q9vm8\" (UID: \"b6a31625-d371-4566-8356-78138c6ec6aa\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.302215 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b6a31625-d371-4566-8356-78138c6ec6aa-config-volume\") pod \"collect-profiles-29395695-q9vm8\" (UID: \"b6a31625-d371-4566-8356-78138c6ec6aa\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.404310 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/b6a31625-d371-4566-8356-78138c6ec6aa-secret-volume\") pod \"collect-profiles-29395695-q9vm8\" (UID: \"b6a31625-d371-4566-8356-78138c6ec6aa\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.404375 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzqfp\" (UniqueName: \"kubernetes.io/projected/b6a31625-d371-4566-8356-78138c6ec6aa-kube-api-access-dzqfp\") pod \"collect-profiles-29395695-q9vm8\" (UID: \"b6a31625-d371-4566-8356-78138c6ec6aa\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.404464 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b6a31625-d371-4566-8356-78138c6ec6aa-config-volume\") pod \"collect-profiles-29395695-q9vm8\" (UID: \"b6a31625-d371-4566-8356-78138c6ec6aa\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.406764 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b6a31625-d371-4566-8356-78138c6ec6aa-config-volume\") pod \"collect-profiles-29395695-q9vm8\" (UID: \"b6a31625-d371-4566-8356-78138c6ec6aa\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.413773 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b6a31625-d371-4566-8356-78138c6ec6aa-secret-volume\") pod \"collect-profiles-29395695-q9vm8\" (UID: \"b6a31625-d371-4566-8356-78138c6ec6aa\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.420964 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzqfp\" (UniqueName: \"kubernetes.io/projected/b6a31625-d371-4566-8356-78138c6ec6aa-kube-api-access-dzqfp\") pod \"collect-profiles-29395695-q9vm8\" (UID: \"b6a31625-d371-4566-8356-78138c6ec6aa\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.484998 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:00 crc kubenswrapper[4774]: I1121 16:15:00.966569 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8"] Nov 21 16:15:01 crc kubenswrapper[4774]: I1121 16:15:01.550708 4774 generic.go:334] "Generic (PLEG): container finished" podID="b6a31625-d371-4566-8356-78138c6ec6aa" containerID="2045250172aed8b09fbde5c1badfcbdeafe29dc9067c7843d392d81800f8bfe1" exitCode=0 Nov 21 16:15:01 crc kubenswrapper[4774]: I1121 16:15:01.550880 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" event={"ID":"b6a31625-d371-4566-8356-78138c6ec6aa","Type":"ContainerDied","Data":"2045250172aed8b09fbde5c1badfcbdeafe29dc9067c7843d392d81800f8bfe1"} Nov 21 16:15:01 crc kubenswrapper[4774]: I1121 16:15:01.551097 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" event={"ID":"b6a31625-d371-4566-8356-78138c6ec6aa","Type":"ContainerStarted","Data":"8d5d4e51ba0a3f43bc2c85906debddec753b4ace5c8231144b76a85b01ad7f5b"} Nov 21 16:15:02 crc kubenswrapper[4774]: I1121 16:15:02.929648 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:03 crc kubenswrapper[4774]: I1121 16:15:03.062031 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b6a31625-d371-4566-8356-78138c6ec6aa-secret-volume\") pod \"b6a31625-d371-4566-8356-78138c6ec6aa\" (UID: \"b6a31625-d371-4566-8356-78138c6ec6aa\") " Nov 21 16:15:03 crc kubenswrapper[4774]: I1121 16:15:03.062084 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b6a31625-d371-4566-8356-78138c6ec6aa-config-volume\") pod \"b6a31625-d371-4566-8356-78138c6ec6aa\" (UID: \"b6a31625-d371-4566-8356-78138c6ec6aa\") " Nov 21 16:15:03 crc kubenswrapper[4774]: I1121 16:15:03.062158 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzqfp\" (UniqueName: \"kubernetes.io/projected/b6a31625-d371-4566-8356-78138c6ec6aa-kube-api-access-dzqfp\") pod \"b6a31625-d371-4566-8356-78138c6ec6aa\" (UID: \"b6a31625-d371-4566-8356-78138c6ec6aa\") " Nov 21 16:15:03 crc kubenswrapper[4774]: I1121 16:15:03.063125 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6a31625-d371-4566-8356-78138c6ec6aa-config-volume" (OuterVolumeSpecName: "config-volume") pod "b6a31625-d371-4566-8356-78138c6ec6aa" (UID: "b6a31625-d371-4566-8356-78138c6ec6aa"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 16:15:03 crc kubenswrapper[4774]: I1121 16:15:03.068112 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6a31625-d371-4566-8356-78138c6ec6aa-kube-api-access-dzqfp" (OuterVolumeSpecName: "kube-api-access-dzqfp") pod "b6a31625-d371-4566-8356-78138c6ec6aa" (UID: "b6a31625-d371-4566-8356-78138c6ec6aa"). InnerVolumeSpecName "kube-api-access-dzqfp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:15:03 crc kubenswrapper[4774]: I1121 16:15:03.068775 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6a31625-d371-4566-8356-78138c6ec6aa-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b6a31625-d371-4566-8356-78138c6ec6aa" (UID: "b6a31625-d371-4566-8356-78138c6ec6aa"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:15:03 crc kubenswrapper[4774]: I1121 16:15:03.165035 4774 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b6a31625-d371-4566-8356-78138c6ec6aa-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 21 16:15:03 crc kubenswrapper[4774]: I1121 16:15:03.165078 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b6a31625-d371-4566-8356-78138c6ec6aa-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 16:15:03 crc kubenswrapper[4774]: I1121 16:15:03.165089 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzqfp\" (UniqueName: \"kubernetes.io/projected/b6a31625-d371-4566-8356-78138c6ec6aa-kube-api-access-dzqfp\") on node \"crc\" DevicePath \"\"" Nov 21 16:15:03 crc kubenswrapper[4774]: I1121 16:15:03.572915 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" event={"ID":"b6a31625-d371-4566-8356-78138c6ec6aa","Type":"ContainerDied","Data":"8d5d4e51ba0a3f43bc2c85906debddec753b4ace5c8231144b76a85b01ad7f5b"} Nov 21 16:15:03 crc kubenswrapper[4774]: I1121 16:15:03.572958 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395695-q9vm8" Nov 21 16:15:03 crc kubenswrapper[4774]: I1121 16:15:03.572964 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d5d4e51ba0a3f43bc2c85906debddec753b4ace5c8231144b76a85b01ad7f5b" Nov 21 16:15:04 crc kubenswrapper[4774]: I1121 16:15:04.019982 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww"] Nov 21 16:15:04 crc kubenswrapper[4774]: I1121 16:15:04.032077 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395650-96zww"] Nov 21 16:15:04 crc kubenswrapper[4774]: I1121 16:15:04.142676 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2" path="/var/lib/kubelet/pods/f0cf48ff-e287-4b4b-b95d-b809b8b5a2e2/volumes" Nov 21 16:15:29 crc kubenswrapper[4774]: I1121 16:15:29.601402 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:15:29 crc kubenswrapper[4774]: I1121 16:15:29.602076 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.271334 4774 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wvjqg"] Nov 21 16:15:38 crc kubenswrapper[4774]: E1121 16:15:38.272448 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6a31625-d371-4566-8356-78138c6ec6aa" containerName="collect-profiles" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.272464 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6a31625-d371-4566-8356-78138c6ec6aa" containerName="collect-profiles" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.297061 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6a31625-d371-4566-8356-78138c6ec6aa" containerName="collect-profiles" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.301288 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wvjqg"] Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.301427 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.395001 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-catalog-content\") pod \"community-operators-wvjqg\" (UID: \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\") " pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.395412 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktlbp\" (UniqueName: \"kubernetes.io/projected/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-kube-api-access-ktlbp\") pod \"community-operators-wvjqg\" (UID: \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\") " pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.395443 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-utilities\") pod \"community-operators-wvjqg\" (UID: \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\") " pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.497848 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktlbp\" (UniqueName: \"kubernetes.io/projected/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-kube-api-access-ktlbp\") pod \"community-operators-wvjqg\" (UID: \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\") " pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.498183 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-utilities\") pod \"community-operators-wvjqg\" (UID: \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\") " pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.498706 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-catalog-content\") pod \"community-operators-wvjqg\" (UID: \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\") " pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 
16:15:38.498768 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-utilities\") pod \"community-operators-wvjqg\" (UID: \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\") " pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.499061 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-catalog-content\") pod \"community-operators-wvjqg\" (UID: \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\") " pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.522451 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktlbp\" (UniqueName: \"kubernetes.io/projected/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-kube-api-access-ktlbp\") pod \"community-operators-wvjqg\" (UID: \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\") " pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:38 crc kubenswrapper[4774]: I1121 16:15:38.640968 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:39 crc kubenswrapper[4774]: I1121 16:15:39.194740 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wvjqg"] Nov 21 16:15:39 crc kubenswrapper[4774]: I1121 16:15:39.978539 4774 generic.go:334] "Generic (PLEG): container finished" podID="c6791a62-1af2-4c3b-82a4-4a6e87b4d519" containerID="6215541028b710633bb2c5ca21e2fcfaa9a9b02819b91d526631764eb0d159d6" exitCode=0 Nov 21 16:15:39 crc kubenswrapper[4774]: I1121 16:15:39.978650 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wvjqg" event={"ID":"c6791a62-1af2-4c3b-82a4-4a6e87b4d519","Type":"ContainerDied","Data":"6215541028b710633bb2c5ca21e2fcfaa9a9b02819b91d526631764eb0d159d6"} Nov 21 16:15:39 crc kubenswrapper[4774]: I1121 16:15:39.978839 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wvjqg" event={"ID":"c6791a62-1af2-4c3b-82a4-4a6e87b4d519","Type":"ContainerStarted","Data":"894afe8aeb207202afc7d35836e94852bb1e8c0b3332b1925733c2a052ce1301"} Nov 21 16:15:40 crc kubenswrapper[4774]: I1121 16:15:40.990739 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wvjqg" event={"ID":"c6791a62-1af2-4c3b-82a4-4a6e87b4d519","Type":"ContainerStarted","Data":"e6556b2d0fd6faf0480212e6982f8ded42d4fee41504434dc124010fca317046"} Nov 21 16:15:42 crc kubenswrapper[4774]: I1121 16:15:42.006711 4774 generic.go:334] "Generic (PLEG): container finished" podID="c6791a62-1af2-4c3b-82a4-4a6e87b4d519" containerID="e6556b2d0fd6faf0480212e6982f8ded42d4fee41504434dc124010fca317046" exitCode=0 Nov 21 16:15:42 crc kubenswrapper[4774]: I1121 16:15:42.006807 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wvjqg" event={"ID":"c6791a62-1af2-4c3b-82a4-4a6e87b4d519","Type":"ContainerDied","Data":"e6556b2d0fd6faf0480212e6982f8ded42d4fee41504434dc124010fca317046"} Nov 21 16:15:43 crc kubenswrapper[4774]: I1121 16:15:43.017684 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wvjqg" 
event={"ID":"c6791a62-1af2-4c3b-82a4-4a6e87b4d519","Type":"ContainerStarted","Data":"24a1c5ea0f746e84347794f28e4de01aec2bf29aaf2344749a510997b3153120"} Nov 21 16:15:43 crc kubenswrapper[4774]: I1121 16:15:43.039617 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wvjqg" podStartSLOduration=2.621834809 podStartE2EDuration="5.039596799s" podCreationTimestamp="2025-11-21 16:15:38 +0000 UTC" firstStartedPulling="2025-11-21 16:15:39.981844691 +0000 UTC m=+7930.634043950" lastFinishedPulling="2025-11-21 16:15:42.399606681 +0000 UTC m=+7933.051805940" observedRunningTime="2025-11-21 16:15:43.03294713 +0000 UTC m=+7933.685146389" watchObservedRunningTime="2025-11-21 16:15:43.039596799 +0000 UTC m=+7933.691796068" Nov 21 16:15:48 crc kubenswrapper[4774]: I1121 16:15:48.641078 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:48 crc kubenswrapper[4774]: I1121 16:15:48.641868 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:48 crc kubenswrapper[4774]: I1121 16:15:48.689863 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:49 crc kubenswrapper[4774]: I1121 16:15:49.123897 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:49 crc kubenswrapper[4774]: I1121 16:15:49.175417 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wvjqg"] Nov 21 16:15:51 crc kubenswrapper[4774]: I1121 16:15:51.092791 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wvjqg" podUID="c6791a62-1af2-4c3b-82a4-4a6e87b4d519" containerName="registry-server" containerID="cri-o://24a1c5ea0f746e84347794f28e4de01aec2bf29aaf2344749a510997b3153120" gracePeriod=2 Nov 21 16:15:51 crc kubenswrapper[4774]: I1121 16:15:51.576485 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:51 crc kubenswrapper[4774]: I1121 16:15:51.696400 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktlbp\" (UniqueName: \"kubernetes.io/projected/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-kube-api-access-ktlbp\") pod \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\" (UID: \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\") " Nov 21 16:15:51 crc kubenswrapper[4774]: I1121 16:15:51.696681 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-utilities\") pod \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\" (UID: \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\") " Nov 21 16:15:51 crc kubenswrapper[4774]: I1121 16:15:51.696713 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-catalog-content\") pod \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\" (UID: \"c6791a62-1af2-4c3b-82a4-4a6e87b4d519\") " Nov 21 16:15:51 crc kubenswrapper[4774]: I1121 16:15:51.701856 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-utilities" (OuterVolumeSpecName: "utilities") pod "c6791a62-1af2-4c3b-82a4-4a6e87b4d519" (UID: "c6791a62-1af2-4c3b-82a4-4a6e87b4d519"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:15:51 crc kubenswrapper[4774]: I1121 16:15:51.703320 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-kube-api-access-ktlbp" (OuterVolumeSpecName: "kube-api-access-ktlbp") pod "c6791a62-1af2-4c3b-82a4-4a6e87b4d519" (UID: "c6791a62-1af2-4c3b-82a4-4a6e87b4d519"). InnerVolumeSpecName "kube-api-access-ktlbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:15:51 crc kubenswrapper[4774]: I1121 16:15:51.749419 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c6791a62-1af2-4c3b-82a4-4a6e87b4d519" (UID: "c6791a62-1af2-4c3b-82a4-4a6e87b4d519"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:15:51 crc kubenswrapper[4774]: I1121 16:15:51.799579 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:15:51 crc kubenswrapper[4774]: I1121 16:15:51.799840 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:15:51 crc kubenswrapper[4774]: I1121 16:15:51.799904 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktlbp\" (UniqueName: \"kubernetes.io/projected/c6791a62-1af2-4c3b-82a4-4a6e87b4d519-kube-api-access-ktlbp\") on node \"crc\" DevicePath \"\"" Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.106461 4774 generic.go:334] "Generic (PLEG): container finished" podID="c6791a62-1af2-4c3b-82a4-4a6e87b4d519" containerID="24a1c5ea0f746e84347794f28e4de01aec2bf29aaf2344749a510997b3153120" exitCode=0 Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.106617 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wvjqg" Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.112654 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wvjqg" event={"ID":"c6791a62-1af2-4c3b-82a4-4a6e87b4d519","Type":"ContainerDied","Data":"24a1c5ea0f746e84347794f28e4de01aec2bf29aaf2344749a510997b3153120"} Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.112709 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wvjqg" event={"ID":"c6791a62-1af2-4c3b-82a4-4a6e87b4d519","Type":"ContainerDied","Data":"894afe8aeb207202afc7d35836e94852bb1e8c0b3332b1925733c2a052ce1301"} Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.112739 4774 scope.go:117] "RemoveContainer" containerID="24a1c5ea0f746e84347794f28e4de01aec2bf29aaf2344749a510997b3153120" Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.143291 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wvjqg"] Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.149222 4774 scope.go:117] "RemoveContainer" containerID="e6556b2d0fd6faf0480212e6982f8ded42d4fee41504434dc124010fca317046" Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.157246 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wvjqg"] Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.172350 4774 scope.go:117] "RemoveContainer" containerID="6215541028b710633bb2c5ca21e2fcfaa9a9b02819b91d526631764eb0d159d6" Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.218430 4774 scope.go:117] "RemoveContainer" containerID="24a1c5ea0f746e84347794f28e4de01aec2bf29aaf2344749a510997b3153120" Nov 21 16:15:52 crc kubenswrapper[4774]: E1121 16:15:52.220618 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24a1c5ea0f746e84347794f28e4de01aec2bf29aaf2344749a510997b3153120\": container with ID starting with 24a1c5ea0f746e84347794f28e4de01aec2bf29aaf2344749a510997b3153120 not found: ID does not exist" containerID="24a1c5ea0f746e84347794f28e4de01aec2bf29aaf2344749a510997b3153120" Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.220669 
Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.220701 4774 scope.go:117] "RemoveContainer" containerID="e6556b2d0fd6faf0480212e6982f8ded42d4fee41504434dc124010fca317046"
Nov 21 16:15:52 crc kubenswrapper[4774]: E1121 16:15:52.221046 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6556b2d0fd6faf0480212e6982f8ded42d4fee41504434dc124010fca317046\": container with ID starting with e6556b2d0fd6faf0480212e6982f8ded42d4fee41504434dc124010fca317046 not found: ID does not exist" containerID="e6556b2d0fd6faf0480212e6982f8ded42d4fee41504434dc124010fca317046"
Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.221109 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6556b2d0fd6faf0480212e6982f8ded42d4fee41504434dc124010fca317046"} err="failed to get container status \"e6556b2d0fd6faf0480212e6982f8ded42d4fee41504434dc124010fca317046\": rpc error: code = NotFound desc = could not find container \"e6556b2d0fd6faf0480212e6982f8ded42d4fee41504434dc124010fca317046\": container with ID starting with e6556b2d0fd6faf0480212e6982f8ded42d4fee41504434dc124010fca317046 not found: ID does not exist"
Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.221141 4774 scope.go:117] "RemoveContainer" containerID="6215541028b710633bb2c5ca21e2fcfaa9a9b02819b91d526631764eb0d159d6"
Nov 21 16:15:52 crc kubenswrapper[4774]: E1121 16:15:52.221423 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6215541028b710633bb2c5ca21e2fcfaa9a9b02819b91d526631764eb0d159d6\": container with ID starting with 6215541028b710633bb2c5ca21e2fcfaa9a9b02819b91d526631764eb0d159d6 not found: ID does not exist" containerID="6215541028b710633bb2c5ca21e2fcfaa9a9b02819b91d526631764eb0d159d6"
Nov 21 16:15:52 crc kubenswrapper[4774]: I1121 16:15:52.221454 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6215541028b710633bb2c5ca21e2fcfaa9a9b02819b91d526631764eb0d159d6"} err="failed to get container status \"6215541028b710633bb2c5ca21e2fcfaa9a9b02819b91d526631764eb0d159d6\": rpc error: code = NotFound desc = could not find container \"6215541028b710633bb2c5ca21e2fcfaa9a9b02819b91d526631764eb0d159d6\": container with ID starting with 6215541028b710633bb2c5ca21e2fcfaa9a9b02819b91d526631764eb0d159d6 not found: ID does not exist"
Nov 21 16:15:53 crc kubenswrapper[4774]: I1121 16:15:53.333616 4774 scope.go:117] "RemoveContainer" containerID="84726f4d7803321f2f6fc42ea4491b744e590dca967102f35a9047fedd9a318b"
Nov 21 16:15:54 crc kubenswrapper[4774]: I1121 16:15:54.104992 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6791a62-1af2-4c3b-82a4-4a6e87b4d519" path="/var/lib/kubelet/pods/c6791a62-1af2-4c3b-82a4-4a6e87b4d519/volumes"
Nov 21 16:15:59 crc kubenswrapper[4774]: I1121 16:15:59.600737 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 16:15:59 crc kubenswrapper[4774]: I1121 16:15:59.601371 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 16:16:29 crc kubenswrapper[4774]: I1121 16:16:29.600549 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 16:16:29 crc kubenswrapper[4774]: I1121 16:16:29.601292 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 16:16:29 crc kubenswrapper[4774]: I1121 16:16:29.601343 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb"
Nov 21 16:16:29 crc kubenswrapper[4774]: I1121 16:16:29.602215 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 21 16:16:29 crc kubenswrapper[4774]: I1121 16:16:29.602285 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" gracePeriod=600
Nov 21 16:16:29 crc kubenswrapper[4774]: E1121 16:16:29.743834 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:16:30 crc kubenswrapper[4774]: I1121 16:16:30.495657 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" exitCode=0
Nov 21 16:16:30 crc kubenswrapper[4774]: I1121 16:16:30.495726 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e"}
Nov 21 16:16:30 crc kubenswrapper[4774]: I1121 16:16:30.496056 4774 scope.go:117] "RemoveContainer" containerID="f694dbded2ebe51cf76565e124d2918bd57d74fbf4e28477972783a450f186b5"
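[annotation] The liveness failures above are HTTP GETs against the machine-config-daemon's health endpoint being refused; after the failure threshold the kubelet kills the container (gracePeriod=600) and schedules a restart. A tiny sketch that reproduces the same check outside the kubelet (illustrative; the endpoint and 1s timeout mirror the log, nothing here is kubelet code):

package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get("http://127.0.0.1:8798/health")
	if err != nil {
		// e.g. "dial tcp 127.0.0.1:8798: connect: connection refused",
		// exactly the output recorded in the probe entries above.
		fmt.Println("probe failure:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("probe status:", resp.StatusCode) // a 2xx/3xx code passes
}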
Nov 21 16:16:30 crc kubenswrapper[4774]: I1121 16:16:30.496784 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e"
Nov 21 16:16:30 crc kubenswrapper[4774]: E1121 16:16:30.497138 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:16:42 crc kubenswrapper[4774]: I1121 16:16:42.093388 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e"
Nov 21 16:16:42 crc kubenswrapper[4774]: E1121 16:16:42.094161 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:16:56 crc kubenswrapper[4774]: I1121 16:16:56.093551 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e"
Nov 21 16:16:56 crc kubenswrapper[4774]: E1121 16:16:56.094307 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:17:11 crc kubenswrapper[4774]: I1121 16:17:11.093948 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e"
Nov 21 16:17:11 crc kubenswrapper[4774]: E1121 16:17:11.095325 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.179145 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5gv4d"]
Nov 21 16:17:17 crc kubenswrapper[4774]: E1121 16:17:17.180315 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6791a62-1af2-4c3b-82a4-4a6e87b4d519" containerName="extract-utilities"
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.180333 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6791a62-1af2-4c3b-82a4-4a6e87b4d519" containerName="extract-utilities"
Nov 21 16:17:17 crc kubenswrapper[4774]: E1121 16:17:17.180371 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6791a62-1af2-4c3b-82a4-4a6e87b4d519" containerName="extract-content"
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.180378 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6791a62-1af2-4c3b-82a4-4a6e87b4d519" containerName="extract-content"
Nov 21 16:17:17 crc kubenswrapper[4774]: E1121 16:17:17.180392 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6791a62-1af2-4c3b-82a4-4a6e87b4d519" containerName="registry-server"
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.180400 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6791a62-1af2-4c3b-82a4-4a6e87b4d519" containerName="registry-server"
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.180686 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6791a62-1af2-4c3b-82a4-4a6e87b4d519" containerName="registry-server"
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.183318 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gv4d"
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.193170 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gv4d"]
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.242734 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-catalog-content\") pod \"redhat-marketplace-5gv4d\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " pod="openshift-marketplace/redhat-marketplace-5gv4d"
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.243127 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqf97\" (UniqueName: \"kubernetes.io/projected/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-kube-api-access-qqf97\") pod \"redhat-marketplace-5gv4d\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " pod="openshift-marketplace/redhat-marketplace-5gv4d"
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.243160 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-utilities\") pod \"redhat-marketplace-5gv4d\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " pod="openshift-marketplace/redhat-marketplace-5gv4d"
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.344982 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-catalog-content\") pod \"redhat-marketplace-5gv4d\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " pod="openshift-marketplace/redhat-marketplace-5gv4d"
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.345048 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqf97\" (UniqueName: \"kubernetes.io/projected/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-kube-api-access-qqf97\") pod \"redhat-marketplace-5gv4d\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " pod="openshift-marketplace/redhat-marketplace-5gv4d"
Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.345071 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-utilities\") pod \"redhat-marketplace-5gv4d\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " pod="openshift-marketplace/redhat-marketplace-5gv4d"
\"redhat-marketplace-5gv4d\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " pod="openshift-marketplace/redhat-marketplace-5gv4d" Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.345737 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-catalog-content\") pod \"redhat-marketplace-5gv4d\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " pod="openshift-marketplace/redhat-marketplace-5gv4d" Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.345774 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-utilities\") pod \"redhat-marketplace-5gv4d\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " pod="openshift-marketplace/redhat-marketplace-5gv4d" Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.365716 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqf97\" (UniqueName: \"kubernetes.io/projected/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-kube-api-access-qqf97\") pod \"redhat-marketplace-5gv4d\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " pod="openshift-marketplace/redhat-marketplace-5gv4d" Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.518395 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gv4d" Nov 21 16:17:17 crc kubenswrapper[4774]: I1121 16:17:17.990954 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gv4d"] Nov 21 16:17:18 crc kubenswrapper[4774]: I1121 16:17:18.977483 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hm55g"] Nov 21 16:17:18 crc kubenswrapper[4774]: I1121 16:17:18.980940 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.007151 4774 generic.go:334] "Generic (PLEG): container finished" podID="b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" containerID="808b0f8de9939e1c0fce7a75dca86f07b308937b034db04bd42356c0686ad742" exitCode=0 Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.007203 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gv4d" event={"ID":"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef","Type":"ContainerDied","Data":"808b0f8de9939e1c0fce7a75dca86f07b308937b034db04bd42356c0686ad742"} Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.007236 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gv4d" event={"ID":"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef","Type":"ContainerStarted","Data":"4888e28980e6156206e97efa8f9ca8f0d5f5d0fdd2181de42f9842c23e458e84"} Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.010308 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hm55g"] Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.085802 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzz56\" (UniqueName: \"kubernetes.io/projected/feb090a4-bb07-4123-84e1-5290abfa47b1-kube-api-access-qzz56\") pod \"redhat-operators-hm55g\" (UID: \"feb090a4-bb07-4123-84e1-5290abfa47b1\") " pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.086116 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/feb090a4-bb07-4123-84e1-5290abfa47b1-utilities\") pod \"redhat-operators-hm55g\" (UID: \"feb090a4-bb07-4123-84e1-5290abfa47b1\") " pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.086227 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/feb090a4-bb07-4123-84e1-5290abfa47b1-catalog-content\") pod \"redhat-operators-hm55g\" (UID: \"feb090a4-bb07-4123-84e1-5290abfa47b1\") " pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.188625 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzz56\" (UniqueName: \"kubernetes.io/projected/feb090a4-bb07-4123-84e1-5290abfa47b1-kube-api-access-qzz56\") pod \"redhat-operators-hm55g\" (UID: \"feb090a4-bb07-4123-84e1-5290abfa47b1\") " pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.188723 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/feb090a4-bb07-4123-84e1-5290abfa47b1-utilities\") pod \"redhat-operators-hm55g\" (UID: \"feb090a4-bb07-4123-84e1-5290abfa47b1\") " pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.188918 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/feb090a4-bb07-4123-84e1-5290abfa47b1-catalog-content\") pod \"redhat-operators-hm55g\" (UID: \"feb090a4-bb07-4123-84e1-5290abfa47b1\") " pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 
16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.189401 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/feb090a4-bb07-4123-84e1-5290abfa47b1-utilities\") pod \"redhat-operators-hm55g\" (UID: \"feb090a4-bb07-4123-84e1-5290abfa47b1\") " pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.189751 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/feb090a4-bb07-4123-84e1-5290abfa47b1-catalog-content\") pod \"redhat-operators-hm55g\" (UID: \"feb090a4-bb07-4123-84e1-5290abfa47b1\") " pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.210495 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzz56\" (UniqueName: \"kubernetes.io/projected/feb090a4-bb07-4123-84e1-5290abfa47b1-kube-api-access-qzz56\") pod \"redhat-operators-hm55g\" (UID: \"feb090a4-bb07-4123-84e1-5290abfa47b1\") " pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.307291 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:19 crc kubenswrapper[4774]: I1121 16:17:19.799878 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hm55g"] Nov 21 16:17:20 crc kubenswrapper[4774]: I1121 16:17:20.027280 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hm55g" event={"ID":"feb090a4-bb07-4123-84e1-5290abfa47b1","Type":"ContainerStarted","Data":"c40c842cb4b1183653ccc7038dc2404de4739a05c5bdf625d004e301cd900b3c"} Nov 21 16:17:21 crc kubenswrapper[4774]: I1121 16:17:21.047728 4774 generic.go:334] "Generic (PLEG): container finished" podID="b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" containerID="7b67bd8a52570df7a3c3a9d4095720299d7e85b0c5904d7347e973f3b53da7d5" exitCode=0 Nov 21 16:17:21 crc kubenswrapper[4774]: I1121 16:17:21.047952 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gv4d" event={"ID":"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef","Type":"ContainerDied","Data":"7b67bd8a52570df7a3c3a9d4095720299d7e85b0c5904d7347e973f3b53da7d5"} Nov 21 16:17:21 crc kubenswrapper[4774]: I1121 16:17:21.050357 4774 generic.go:334] "Generic (PLEG): container finished" podID="feb090a4-bb07-4123-84e1-5290abfa47b1" containerID="c75dbd0f8e94faafbc4a54829aa3614d835518a7c62a75f34cae0922b1fbbcf1" exitCode=0 Nov 21 16:17:21 crc kubenswrapper[4774]: I1121 16:17:21.050475 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hm55g" event={"ID":"feb090a4-bb07-4123-84e1-5290abfa47b1","Type":"ContainerDied","Data":"c75dbd0f8e94faafbc4a54829aa3614d835518a7c62a75f34cae0922b1fbbcf1"} Nov 21 16:17:22 crc kubenswrapper[4774]: I1121 16:17:22.063292 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gv4d" event={"ID":"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef","Type":"ContainerStarted","Data":"9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641"} Nov 21 16:17:22 crc kubenswrapper[4774]: I1121 16:17:22.066192 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hm55g" 
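[annotation] Both catalog pods above mount the same three volumes the reconciler logs: two pod-scoped emptyDirs ("utilities", "catalog-content") plus the projected service-account token volume the control plane injects as kube-api-access-<suffix>. A sketch of that volume layout using the real corev1 types (the pod object is built locally for illustration, not submitted anywhere; the token volume is injected automatically, so it is not declared here):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func main() {
	pod := corev1.Pod{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "redhat-marketplace-5gv4d",
			Namespace: "openshift-marketplace",
		},
		Spec: corev1.PodSpec{
			Volumes: []corev1.Volume{
				{Name: "utilities", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
				{Name: "catalog-content", VolumeSource: corev1.VolumeSource{EmptyDir: &corev1.EmptyDirVolumeSource{}}},
			},
		},
	}
	fmt.Printf("%s/%s declares %d pod-scoped volumes\n",
		pod.Namespace, pod.Name, len(pod.Spec.Volumes))
}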
event={"ID":"feb090a4-bb07-4123-84e1-5290abfa47b1","Type":"ContainerStarted","Data":"51bc43cbb1efa6b06aeaba3f704b2fb57f357aa8853de59d54ca01716e21579b"} Nov 21 16:17:22 crc kubenswrapper[4774]: I1121 16:17:22.082974 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5gv4d" podStartSLOduration=2.65038208 podStartE2EDuration="5.082946444s" podCreationTimestamp="2025-11-21 16:17:17 +0000 UTC" firstStartedPulling="2025-11-21 16:17:19.009160048 +0000 UTC m=+8029.661359307" lastFinishedPulling="2025-11-21 16:17:21.441724412 +0000 UTC m=+8032.093923671" observedRunningTime="2025-11-21 16:17:22.07966629 +0000 UTC m=+8032.731865559" watchObservedRunningTime="2025-11-21 16:17:22.082946444 +0000 UTC m=+8032.735145703" Nov 21 16:17:25 crc kubenswrapper[4774]: I1121 16:17:25.095614 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:17:25 crc kubenswrapper[4774]: E1121 16:17:25.098052 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:17:26 crc kubenswrapper[4774]: I1121 16:17:26.120220 4774 generic.go:334] "Generic (PLEG): container finished" podID="feb090a4-bb07-4123-84e1-5290abfa47b1" containerID="51bc43cbb1efa6b06aeaba3f704b2fb57f357aa8853de59d54ca01716e21579b" exitCode=0 Nov 21 16:17:26 crc kubenswrapper[4774]: I1121 16:17:26.120309 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hm55g" event={"ID":"feb090a4-bb07-4123-84e1-5290abfa47b1","Type":"ContainerDied","Data":"51bc43cbb1efa6b06aeaba3f704b2fb57f357aa8853de59d54ca01716e21579b"} Nov 21 16:17:27 crc kubenswrapper[4774]: I1121 16:17:27.132576 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hm55g" event={"ID":"feb090a4-bb07-4123-84e1-5290abfa47b1","Type":"ContainerStarted","Data":"5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2"} Nov 21 16:17:27 crc kubenswrapper[4774]: I1121 16:17:27.160071 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hm55g" podStartSLOduration=3.6514460509999997 podStartE2EDuration="9.160049255s" podCreationTimestamp="2025-11-21 16:17:18 +0000 UTC" firstStartedPulling="2025-11-21 16:17:21.052353831 +0000 UTC m=+8031.704553090" lastFinishedPulling="2025-11-21 16:17:26.560957035 +0000 UTC m=+8037.213156294" observedRunningTime="2025-11-21 16:17:27.148550486 +0000 UTC m=+8037.800749755" watchObservedRunningTime="2025-11-21 16:17:27.160049255 +0000 UTC m=+8037.812248514" Nov 21 16:17:27 crc kubenswrapper[4774]: I1121 16:17:27.519489 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5gv4d" Nov 21 16:17:27 crc kubenswrapper[4774]: I1121 16:17:27.519595 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5gv4d" Nov 21 16:17:27 crc kubenswrapper[4774]: I1121 16:17:27.575735 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-5gv4d" Nov 21 16:17:28 crc kubenswrapper[4774]: I1121 16:17:28.199633 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5gv4d" Nov 21 16:17:28 crc kubenswrapper[4774]: I1121 16:17:28.768840 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gv4d"] Nov 21 16:17:29 crc kubenswrapper[4774]: I1121 16:17:29.308025 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:29 crc kubenswrapper[4774]: I1121 16:17:29.308431 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:30 crc kubenswrapper[4774]: I1121 16:17:30.161013 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5gv4d" podUID="b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" containerName="registry-server" containerID="cri-o://9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641" gracePeriod=2 Nov 21 16:17:30 crc kubenswrapper[4774]: I1121 16:17:30.361905 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hm55g" podUID="feb090a4-bb07-4123-84e1-5290abfa47b1" containerName="registry-server" probeResult="failure" output=< Nov 21 16:17:30 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 16:17:30 crc kubenswrapper[4774]: > Nov 21 16:17:30 crc kubenswrapper[4774]: I1121 16:17:30.667269 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gv4d" Nov 21 16:17:30 crc kubenswrapper[4774]: I1121 16:17:30.789769 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-catalog-content\") pod \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " Nov 21 16:17:30 crc kubenswrapper[4774]: I1121 16:17:30.790153 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-utilities\") pod \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " Nov 21 16:17:30 crc kubenswrapper[4774]: I1121 16:17:30.790224 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqf97\" (UniqueName: \"kubernetes.io/projected/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-kube-api-access-qqf97\") pod \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\" (UID: \"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef\") " Nov 21 16:17:30 crc kubenswrapper[4774]: I1121 16:17:30.791292 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-utilities" (OuterVolumeSpecName: "utilities") pod "b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" (UID: "b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:17:30 crc kubenswrapper[4774]: I1121 16:17:30.797358 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-kube-api-access-qqf97" (OuterVolumeSpecName: "kube-api-access-qqf97") pod "b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" (UID: "b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef"). InnerVolumeSpecName "kube-api-access-qqf97". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:17:30 crc kubenswrapper[4774]: I1121 16:17:30.809451 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" (UID: "b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:17:30 crc kubenswrapper[4774]: I1121 16:17:30.894242 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:17:30 crc kubenswrapper[4774]: I1121 16:17:30.894340 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqf97\" (UniqueName: \"kubernetes.io/projected/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-kube-api-access-qqf97\") on node \"crc\" DevicePath \"\"" Nov 21 16:17:30 crc kubenswrapper[4774]: I1121 16:17:30.894356 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.175017 4774 generic.go:334] "Generic (PLEG): container finished" podID="b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" containerID="9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641" exitCode=0 Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.175076 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5gv4d" Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.175089 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gv4d" event={"ID":"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef","Type":"ContainerDied","Data":"9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641"} Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.175167 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5gv4d" event={"ID":"b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef","Type":"ContainerDied","Data":"4888e28980e6156206e97efa8f9ca8f0d5f5d0fdd2181de42f9842c23e458e84"} Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.175192 4774 scope.go:117] "RemoveContainer" containerID="9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641" Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.198550 4774 scope.go:117] "RemoveContainer" containerID="7b67bd8a52570df7a3c3a9d4095720299d7e85b0c5904d7347e973f3b53da7d5" Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.211194 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gv4d"] Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.220696 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5gv4d"] Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.240034 4774 scope.go:117] "RemoveContainer" containerID="808b0f8de9939e1c0fce7a75dca86f07b308937b034db04bd42356c0686ad742" Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.282641 4774 scope.go:117] "RemoveContainer" containerID="9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641" Nov 21 16:17:31 crc kubenswrapper[4774]: E1121 16:17:31.283157 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641\": container with ID starting with 9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641 not found: ID does not exist" containerID="9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641" Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.283204 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641"} err="failed to get container status \"9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641\": rpc error: code = NotFound desc = could not find container \"9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641\": container with ID starting with 9b41b4fba524a267a18dbb448768b6a0f54bd629636121b9ee8b4489b3a07641 not found: ID does not exist" Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.283236 4774 scope.go:117] "RemoveContainer" containerID="7b67bd8a52570df7a3c3a9d4095720299d7e85b0c5904d7347e973f3b53da7d5" Nov 21 16:17:31 crc kubenswrapper[4774]: E1121 16:17:31.283560 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b67bd8a52570df7a3c3a9d4095720299d7e85b0c5904d7347e973f3b53da7d5\": container with ID starting with 7b67bd8a52570df7a3c3a9d4095720299d7e85b0c5904d7347e973f3b53da7d5 not found: ID does not exist" containerID="7b67bd8a52570df7a3c3a9d4095720299d7e85b0c5904d7347e973f3b53da7d5" Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.283600 4774 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b67bd8a52570df7a3c3a9d4095720299d7e85b0c5904d7347e973f3b53da7d5"} err="failed to get container status \"7b67bd8a52570df7a3c3a9d4095720299d7e85b0c5904d7347e973f3b53da7d5\": rpc error: code = NotFound desc = could not find container \"7b67bd8a52570df7a3c3a9d4095720299d7e85b0c5904d7347e973f3b53da7d5\": container with ID starting with 7b67bd8a52570df7a3c3a9d4095720299d7e85b0c5904d7347e973f3b53da7d5 not found: ID does not exist" Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.283629 4774 scope.go:117] "RemoveContainer" containerID="808b0f8de9939e1c0fce7a75dca86f07b308937b034db04bd42356c0686ad742" Nov 21 16:17:31 crc kubenswrapper[4774]: E1121 16:17:31.283945 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"808b0f8de9939e1c0fce7a75dca86f07b308937b034db04bd42356c0686ad742\": container with ID starting with 808b0f8de9939e1c0fce7a75dca86f07b308937b034db04bd42356c0686ad742 not found: ID does not exist" containerID="808b0f8de9939e1c0fce7a75dca86f07b308937b034db04bd42356c0686ad742" Nov 21 16:17:31 crc kubenswrapper[4774]: I1121 16:17:31.283971 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"808b0f8de9939e1c0fce7a75dca86f07b308937b034db04bd42356c0686ad742"} err="failed to get container status \"808b0f8de9939e1c0fce7a75dca86f07b308937b034db04bd42356c0686ad742\": rpc error: code = NotFound desc = could not find container \"808b0f8de9939e1c0fce7a75dca86f07b308937b034db04bd42356c0686ad742\": container with ID starting with 808b0f8de9939e1c0fce7a75dca86f07b308937b034db04bd42356c0686ad742 not found: ID does not exist" Nov 21 16:17:32 crc kubenswrapper[4774]: I1121 16:17:32.105311 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" path="/var/lib/kubelet/pods/b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef/volumes" Nov 21 16:17:37 crc kubenswrapper[4774]: I1121 16:17:37.093744 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:17:37 crc kubenswrapper[4774]: E1121 16:17:37.094651 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:17:39 crc kubenswrapper[4774]: I1121 16:17:39.365434 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:39 crc kubenswrapper[4774]: I1121 16:17:39.417129 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:39 crc kubenswrapper[4774]: I1121 16:17:39.605374 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hm55g"] Nov 21 16:17:41 crc kubenswrapper[4774]: I1121 16:17:41.297582 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hm55g" podUID="feb090a4-bb07-4123-84e1-5290abfa47b1" containerName="registry-server" 
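[annotation] The "ContainerStatus from runtime service failed ... NotFound" / "DeleteContainer returned error" pairs above are benign: the kubelet retries removal for containers CRI-O has already garbage-collected, and the runtime answers with gRPC code NotFound. A caller on the CRI side can treat that code as "already deleted"; a small sketch of the check (the error here is constructed locally to match the shape in the log):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// alreadyGone reports whether a runtime error means the container no
// longer exists, so deletion can be considered complete.
func alreadyGone(err error) bool {
	return status.Code(err) == codes.NotFound
}

func main() {
	err := status.Error(codes.NotFound, "could not find container")
	if alreadyGone(err) {
		fmt.Println("container already removed; safe to ignore")
	}
}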
containerID="cri-o://5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2" gracePeriod=2 Nov 21 16:17:41 crc kubenswrapper[4774]: I1121 16:17:41.804237 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:41 crc kubenswrapper[4774]: I1121 16:17:41.933448 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzz56\" (UniqueName: \"kubernetes.io/projected/feb090a4-bb07-4123-84e1-5290abfa47b1-kube-api-access-qzz56\") pod \"feb090a4-bb07-4123-84e1-5290abfa47b1\" (UID: \"feb090a4-bb07-4123-84e1-5290abfa47b1\") " Nov 21 16:17:41 crc kubenswrapper[4774]: I1121 16:17:41.933776 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/feb090a4-bb07-4123-84e1-5290abfa47b1-catalog-content\") pod \"feb090a4-bb07-4123-84e1-5290abfa47b1\" (UID: \"feb090a4-bb07-4123-84e1-5290abfa47b1\") " Nov 21 16:17:41 crc kubenswrapper[4774]: I1121 16:17:41.933907 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/feb090a4-bb07-4123-84e1-5290abfa47b1-utilities\") pod \"feb090a4-bb07-4123-84e1-5290abfa47b1\" (UID: \"feb090a4-bb07-4123-84e1-5290abfa47b1\") " Nov 21 16:17:41 crc kubenswrapper[4774]: I1121 16:17:41.935432 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/feb090a4-bb07-4123-84e1-5290abfa47b1-utilities" (OuterVolumeSpecName: "utilities") pod "feb090a4-bb07-4123-84e1-5290abfa47b1" (UID: "feb090a4-bb07-4123-84e1-5290abfa47b1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:17:41 crc kubenswrapper[4774]: I1121 16:17:41.939131 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/feb090a4-bb07-4123-84e1-5290abfa47b1-kube-api-access-qzz56" (OuterVolumeSpecName: "kube-api-access-qzz56") pod "feb090a4-bb07-4123-84e1-5290abfa47b1" (UID: "feb090a4-bb07-4123-84e1-5290abfa47b1"). InnerVolumeSpecName "kube-api-access-qzz56". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.036578 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzz56\" (UniqueName: \"kubernetes.io/projected/feb090a4-bb07-4123-84e1-5290abfa47b1-kube-api-access-qzz56\") on node \"crc\" DevicePath \"\"" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.036620 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/feb090a4-bb07-4123-84e1-5290abfa47b1-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.037512 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/feb090a4-bb07-4123-84e1-5290abfa47b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "feb090a4-bb07-4123-84e1-5290abfa47b1" (UID: "feb090a4-bb07-4123-84e1-5290abfa47b1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.140878 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/feb090a4-bb07-4123-84e1-5290abfa47b1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.314582 4774 generic.go:334] "Generic (PLEG): container finished" podID="feb090a4-bb07-4123-84e1-5290abfa47b1" containerID="5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2" exitCode=0 Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.314635 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hm55g" event={"ID":"feb090a4-bb07-4123-84e1-5290abfa47b1","Type":"ContainerDied","Data":"5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2"} Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.314666 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hm55g" event={"ID":"feb090a4-bb07-4123-84e1-5290abfa47b1","Type":"ContainerDied","Data":"c40c842cb4b1183653ccc7038dc2404de4739a05c5bdf625d004e301cd900b3c"} Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.314661 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hm55g" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.314755 4774 scope.go:117] "RemoveContainer" containerID="5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.345493 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hm55g"] Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.353930 4774 scope.go:117] "RemoveContainer" containerID="51bc43cbb1efa6b06aeaba3f704b2fb57f357aa8853de59d54ca01716e21579b" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.358520 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hm55g"] Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.377341 4774 scope.go:117] "RemoveContainer" containerID="c75dbd0f8e94faafbc4a54829aa3614d835518a7c62a75f34cae0922b1fbbcf1" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.437706 4774 scope.go:117] "RemoveContainer" containerID="5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2" Nov 21 16:17:42 crc kubenswrapper[4774]: E1121 16:17:42.438452 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2\": container with ID starting with 5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2 not found: ID does not exist" containerID="5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.438559 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2"} err="failed to get container status \"5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2\": rpc error: code = NotFound desc = could not find container \"5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2\": container with ID starting with 5f29c88659315c7f89e51d7448ab3f53e1b98b6f2bdba363f9b5ed13c50b41e2 not found: ID does not exist" Nov 21 16:17:42 crc 
kubenswrapper[4774]: I1121 16:17:42.438676 4774 scope.go:117] "RemoveContainer" containerID="51bc43cbb1efa6b06aeaba3f704b2fb57f357aa8853de59d54ca01716e21579b" Nov 21 16:17:42 crc kubenswrapper[4774]: E1121 16:17:42.439516 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51bc43cbb1efa6b06aeaba3f704b2fb57f357aa8853de59d54ca01716e21579b\": container with ID starting with 51bc43cbb1efa6b06aeaba3f704b2fb57f357aa8853de59d54ca01716e21579b not found: ID does not exist" containerID="51bc43cbb1efa6b06aeaba3f704b2fb57f357aa8853de59d54ca01716e21579b" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.439593 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51bc43cbb1efa6b06aeaba3f704b2fb57f357aa8853de59d54ca01716e21579b"} err="failed to get container status \"51bc43cbb1efa6b06aeaba3f704b2fb57f357aa8853de59d54ca01716e21579b\": rpc error: code = NotFound desc = could not find container \"51bc43cbb1efa6b06aeaba3f704b2fb57f357aa8853de59d54ca01716e21579b\": container with ID starting with 51bc43cbb1efa6b06aeaba3f704b2fb57f357aa8853de59d54ca01716e21579b not found: ID does not exist" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.439657 4774 scope.go:117] "RemoveContainer" containerID="c75dbd0f8e94faafbc4a54829aa3614d835518a7c62a75f34cae0922b1fbbcf1" Nov 21 16:17:42 crc kubenswrapper[4774]: E1121 16:17:42.440067 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c75dbd0f8e94faafbc4a54829aa3614d835518a7c62a75f34cae0922b1fbbcf1\": container with ID starting with c75dbd0f8e94faafbc4a54829aa3614d835518a7c62a75f34cae0922b1fbbcf1 not found: ID does not exist" containerID="c75dbd0f8e94faafbc4a54829aa3614d835518a7c62a75f34cae0922b1fbbcf1" Nov 21 16:17:42 crc kubenswrapper[4774]: I1121 16:17:42.440096 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c75dbd0f8e94faafbc4a54829aa3614d835518a7c62a75f34cae0922b1fbbcf1"} err="failed to get container status \"c75dbd0f8e94faafbc4a54829aa3614d835518a7c62a75f34cae0922b1fbbcf1\": rpc error: code = NotFound desc = could not find container \"c75dbd0f8e94faafbc4a54829aa3614d835518a7c62a75f34cae0922b1fbbcf1\": container with ID starting with c75dbd0f8e94faafbc4a54829aa3614d835518a7c62a75f34cae0922b1fbbcf1 not found: ID does not exist" Nov 21 16:17:44 crc kubenswrapper[4774]: I1121 16:17:44.106903 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="feb090a4-bb07-4123-84e1-5290abfa47b1" path="/var/lib/kubelet/pods/feb090a4-bb07-4123-84e1-5290abfa47b1/volumes" Nov 21 16:17:49 crc kubenswrapper[4774]: I1121 16:17:49.393261 4774 generic.go:334] "Generic (PLEG): container finished" podID="bf4ece17-48c3-4137-9e1a-44d545af4a88" containerID="ba46d03db3b51a9c18e9daac28f40be1dd4386c53672eb617e7ccba1442d7411" exitCode=0 Nov 21 16:17:49 crc kubenswrapper[4774]: I1121 16:17:49.393366 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-b55tj" event={"ID":"bf4ece17-48c3-4137-9e1a-44d545af4a88","Type":"ContainerDied","Data":"ba46d03db3b51a9c18e9daac28f40be1dd4386c53672eb617e7ccba1442d7411"} Nov 21 16:17:50 crc kubenswrapper[4774]: I1121 16:17:50.879483 4774 util.go:48] "No ready sandbox for pod can be found. 
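[annotation] Once all containers and volumes for a pod are gone, the kubelet removes /var/lib/kubelet/pods/<podUID>/volumes and logs "Cleaned up orphaned pod volumes dir", as seen above for feb090a4-bb07-4123-84e1-5290abfa47b1. A sketch that checks whether a UID still has leftovers on disk (illustrative inspection tool, not kubelet code; the UID is taken from the log):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	podUID := "feb090a4-bb07-4123-84e1-5290abfa47b1"
	dir := filepath.Join("/var/lib/kubelet/pods", podUID, "volumes")
	entries, err := os.ReadDir(dir)
	if os.IsNotExist(err) {
		fmt.Println("volumes dir already cleaned up:", dir)
		return
	}
	if err != nil {
		fmt.Println("cannot read", dir, ":", err)
		return
	}
	fmt.Printf("%d volume plugin dirs still present under %s\n", len(entries), dir)
}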
Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.033455 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-ssh-key\") pod \"bf4ece17-48c3-4137-9e1a-44d545af4a88\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.033609 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-libvirt-secret-0\") pod \"bf4ece17-48c3-4137-9e1a-44d545af4a88\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.033681 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-inventory\") pod \"bf4ece17-48c3-4137-9e1a-44d545af4a88\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.033745 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnjdv\" (UniqueName: \"kubernetes.io/projected/bf4ece17-48c3-4137-9e1a-44d545af4a88-kube-api-access-wnjdv\") pod \"bf4ece17-48c3-4137-9e1a-44d545af4a88\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.033933 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-libvirt-combined-ca-bundle\") pod \"bf4ece17-48c3-4137-9e1a-44d545af4a88\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.034098 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-ceph\") pod \"bf4ece17-48c3-4137-9e1a-44d545af4a88\" (UID: \"bf4ece17-48c3-4137-9e1a-44d545af4a88\") " Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.038707 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf4ece17-48c3-4137-9e1a-44d545af4a88-kube-api-access-wnjdv" (OuterVolumeSpecName: "kube-api-access-wnjdv") pod "bf4ece17-48c3-4137-9e1a-44d545af4a88" (UID: "bf4ece17-48c3-4137-9e1a-44d545af4a88"). InnerVolumeSpecName "kube-api-access-wnjdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.045214 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-ceph" (OuterVolumeSpecName: "ceph") pod "bf4ece17-48c3-4137-9e1a-44d545af4a88" (UID: "bf4ece17-48c3-4137-9e1a-44d545af4a88"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.045420 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "bf4ece17-48c3-4137-9e1a-44d545af4a88" (UID: "bf4ece17-48c3-4137-9e1a-44d545af4a88"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.066719 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "bf4ece17-48c3-4137-9e1a-44d545af4a88" (UID: "bf4ece17-48c3-4137-9e1a-44d545af4a88"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.068037 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-inventory" (OuterVolumeSpecName: "inventory") pod "bf4ece17-48c3-4137-9e1a-44d545af4a88" (UID: "bf4ece17-48c3-4137-9e1a-44d545af4a88"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.070319 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bf4ece17-48c3-4137-9e1a-44d545af4a88" (UID: "bf4ece17-48c3-4137-9e1a-44d545af4a88"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.093051 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:17:51 crc kubenswrapper[4774]: E1121 16:17:51.093406 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.137064 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.137106 4774 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.137120 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.137133 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnjdv\" (UniqueName: \"kubernetes.io/projected/bf4ece17-48c3-4137-9e1a-44d545af4a88-kube-api-access-wnjdv\") on node \"crc\" DevicePath \"\"" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.137146 4774 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.137158 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: 
\"kubernetes.io/secret/bf4ece17-48c3-4137-9e1a-44d545af4a88-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.415105 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-openstack-openstack-cell1-b55tj" event={"ID":"bf4ece17-48c3-4137-9e1a-44d545af4a88","Type":"ContainerDied","Data":"7e79b5f694fe06fd14e767a3fd4e07379e76bd4c8fb3efaadac31f9a19d72d84"} Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.415145 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e79b5f694fe06fd14e767a3fd4e07379e76bd4c8fb3efaadac31f9a19d72d84" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.415180 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-openstack-openstack-cell1-b55tj" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.506411 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-6x4bb"] Nov 21 16:17:51 crc kubenswrapper[4774]: E1121 16:17:51.507229 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feb090a4-bb07-4123-84e1-5290abfa47b1" containerName="extract-content" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.507316 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="feb090a4-bb07-4123-84e1-5290abfa47b1" containerName="extract-content" Nov 21 16:17:51 crc kubenswrapper[4774]: E1121 16:17:51.507395 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" containerName="registry-server" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.507492 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" containerName="registry-server" Nov 21 16:17:51 crc kubenswrapper[4774]: E1121 16:17:51.507567 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feb090a4-bb07-4123-84e1-5290abfa47b1" containerName="extract-utilities" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.507629 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="feb090a4-bb07-4123-84e1-5290abfa47b1" containerName="extract-utilities" Nov 21 16:17:51 crc kubenswrapper[4774]: E1121 16:17:51.507698 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" containerName="extract-utilities" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.507766 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" containerName="extract-utilities" Nov 21 16:17:51 crc kubenswrapper[4774]: E1121 16:17:51.507863 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf4ece17-48c3-4137-9e1a-44d545af4a88" containerName="libvirt-openstack-openstack-cell1" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.507944 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf4ece17-48c3-4137-9e1a-44d545af4a88" containerName="libvirt-openstack-openstack-cell1" Nov 21 16:17:51 crc kubenswrapper[4774]: E1121 16:17:51.508013 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feb090a4-bb07-4123-84e1-5290abfa47b1" containerName="registry-server" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.508076 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="feb090a4-bb07-4123-84e1-5290abfa47b1" containerName="registry-server" Nov 21 16:17:51 crc kubenswrapper[4774]: E1121 16:17:51.508187 4774 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" containerName="extract-content" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.508289 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" containerName="extract-content" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.508631 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="feb090a4-bb07-4123-84e1-5290abfa47b1" containerName="registry-server" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.508718 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2132e64-3826-4d5f-9fb3-1df0b0a9d5ef" containerName="registry-server" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.508798 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf4ece17-48c3-4137-9e1a-44d545af4a88" containerName="libvirt-openstack-openstack-cell1" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.509931 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.512164 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.516675 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.517899 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.518071 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.518099 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.518127 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.518145 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.521080 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-6x4bb"] Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.650913 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.651193 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-ceph\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.651441 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-7mbhw\" (UniqueName: \"kubernetes.io/projected/573bb757-eff3-426f-a71e-7d1c21f6cf67-kube-api-access-7mbhw\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.651643 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.651723 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.651777 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.651844 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.651952 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.651985 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.652002 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.652077 4774 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-inventory\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.754889 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.754948 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-ceph\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.754990 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mbhw\" (UniqueName: \"kubernetes.io/projected/573bb757-eff3-426f-a71e-7d1c21f6cf67-kube-api-access-7mbhw\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.755031 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.755430 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.755493 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.755533 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.755665 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.755706 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.755757 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.755948 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-inventory\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.756217 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cells-global-config-1\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.756780 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cells-global-config-0\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.758860 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.759763 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-ssh-key\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.760446 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-inventory\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " 
pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.761876 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.762124 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.762245 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.762665 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-ceph\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.764077 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.772517 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mbhw\" (UniqueName: \"kubernetes.io/projected/573bb757-eff3-426f-a71e-7d1c21f6cf67-kube-api-access-7mbhw\") pod \"nova-cell1-openstack-openstack-cell1-6x4bb\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:51 crc kubenswrapper[4774]: I1121 16:17:51.832420 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:17:52 crc kubenswrapper[4774]: I1121 16:17:52.402377 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-openstack-cell1-6x4bb"] Nov 21 16:17:52 crc kubenswrapper[4774]: I1121 16:17:52.430914 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" event={"ID":"573bb757-eff3-426f-a71e-7d1c21f6cf67","Type":"ContainerStarted","Data":"7f209e3b9c5178fda6942dbc89145601dbd03519d9b8d79fa926a9d2eab8a2db"} Nov 21 16:17:55 crc kubenswrapper[4774]: I1121 16:17:55.481506 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" event={"ID":"573bb757-eff3-426f-a71e-7d1c21f6cf67","Type":"ContainerStarted","Data":"b81720a7b4ad50b7088d854b077ea14aa6b965c614e9756df235d5e7474ffeb7"} Nov 21 16:17:55 crc kubenswrapper[4774]: I1121 16:17:55.504879 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" podStartSLOduration=2.8691920079999997 podStartE2EDuration="4.504853871s" podCreationTimestamp="2025-11-21 16:17:51 +0000 UTC" firstStartedPulling="2025-11-21 16:17:52.412081083 +0000 UTC m=+8063.064280342" lastFinishedPulling="2025-11-21 16:17:54.047742946 +0000 UTC m=+8064.699942205" observedRunningTime="2025-11-21 16:17:55.500369933 +0000 UTC m=+8066.152569212" watchObservedRunningTime="2025-11-21 16:17:55.504853871 +0000 UTC m=+8066.157053150" Nov 21 16:18:04 crc kubenswrapper[4774]: I1121 16:18:04.093198 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:18:04 crc kubenswrapper[4774]: E1121 16:18:04.094114 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:18:15 crc kubenswrapper[4774]: I1121 16:18:15.094215 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:18:15 crc kubenswrapper[4774]: E1121 16:18:15.096161 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:18:26 crc kubenswrapper[4774]: I1121 16:18:26.092871 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:18:26 crc kubenswrapper[4774]: E1121 16:18:26.093598 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" 
podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:18:39 crc kubenswrapper[4774]: I1121 16:18:39.093760 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:18:39 crc kubenswrapper[4774]: E1121 16:18:39.094768 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:18:54 crc kubenswrapper[4774]: I1121 16:18:54.093612 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:18:54 crc kubenswrapper[4774]: E1121 16:18:54.094361 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:19:09 crc kubenswrapper[4774]: I1121 16:19:09.093779 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:19:09 crc kubenswrapper[4774]: E1121 16:19:09.094504 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:19:24 crc kubenswrapper[4774]: I1121 16:19:24.093774 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:19:24 crc kubenswrapper[4774]: E1121 16:19:24.094607 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:19:35 crc kubenswrapper[4774]: I1121 16:19:35.094236 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:19:35 crc kubenswrapper[4774]: E1121 16:19:35.095078 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:19:46 crc kubenswrapper[4774]: I1121 16:19:46.093397 4774 scope.go:117] "RemoveContainer" 
containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:19:46 crc kubenswrapper[4774]: E1121 16:19:46.094053 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:20:01 crc kubenswrapper[4774]: I1121 16:20:01.093065 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:20:01 crc kubenswrapper[4774]: E1121 16:20:01.093945 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:20:12 crc kubenswrapper[4774]: I1121 16:20:12.092923 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:20:12 crc kubenswrapper[4774]: E1121 16:20:12.094647 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:20:25 crc kubenswrapper[4774]: I1121 16:20:25.092981 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:20:25 crc kubenswrapper[4774]: E1121 16:20:25.093866 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:20:37 crc kubenswrapper[4774]: I1121 16:20:37.093897 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:20:37 crc kubenswrapper[4774]: E1121 16:20:37.094835 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:20:51 crc kubenswrapper[4774]: I1121 16:20:51.099376 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:20:51 crc kubenswrapper[4774]: E1121 16:20:51.100757 4774 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:20:58 crc kubenswrapper[4774]: I1121 16:20:58.344903 4774 generic.go:334] "Generic (PLEG): container finished" podID="573bb757-eff3-426f-a71e-7d1c21f6cf67" containerID="b81720a7b4ad50b7088d854b077ea14aa6b965c614e9756df235d5e7474ffeb7" exitCode=0 Nov 21 16:20:58 crc kubenswrapper[4774]: I1121 16:20:58.344994 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" event={"ID":"573bb757-eff3-426f-a71e-7d1c21f6cf67","Type":"ContainerDied","Data":"b81720a7b4ad50b7088d854b077ea14aa6b965c614e9756df235d5e7474ffeb7"} Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.784298 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.892028 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-ssh-key\") pod \"573bb757-eff3-426f-a71e-7d1c21f6cf67\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.892070 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-migration-ssh-key-0\") pod \"573bb757-eff3-426f-a71e-7d1c21f6cf67\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.892093 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-compute-config-1\") pod \"573bb757-eff3-426f-a71e-7d1c21f6cf67\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.892145 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-migration-ssh-key-1\") pod \"573bb757-eff3-426f-a71e-7d1c21f6cf67\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.892175 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cells-global-config-1\") pod \"573bb757-eff3-426f-a71e-7d1c21f6cf67\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.892232 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cells-global-config-0\") pod \"573bb757-eff3-426f-a71e-7d1c21f6cf67\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.892259 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-inventory\") pod \"573bb757-eff3-426f-a71e-7d1c21f6cf67\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.892289 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-combined-ca-bundle\") pod \"573bb757-eff3-426f-a71e-7d1c21f6cf67\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.892318 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mbhw\" (UniqueName: \"kubernetes.io/projected/573bb757-eff3-426f-a71e-7d1c21f6cf67-kube-api-access-7mbhw\") pod \"573bb757-eff3-426f-a71e-7d1c21f6cf67\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.892372 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-ceph\") pod \"573bb757-eff3-426f-a71e-7d1c21f6cf67\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.892512 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-compute-config-0\") pod \"573bb757-eff3-426f-a71e-7d1c21f6cf67\" (UID: \"573bb757-eff3-426f-a71e-7d1c21f6cf67\") " Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.898205 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-ceph" (OuterVolumeSpecName: "ceph") pod "573bb757-eff3-426f-a71e-7d1c21f6cf67" (UID: "573bb757-eff3-426f-a71e-7d1c21f6cf67"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.899295 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "573bb757-eff3-426f-a71e-7d1c21f6cf67" (UID: "573bb757-eff3-426f-a71e-7d1c21f6cf67"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.899524 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/573bb757-eff3-426f-a71e-7d1c21f6cf67-kube-api-access-7mbhw" (OuterVolumeSpecName: "kube-api-access-7mbhw") pod "573bb757-eff3-426f-a71e-7d1c21f6cf67" (UID: "573bb757-eff3-426f-a71e-7d1c21f6cf67"). InnerVolumeSpecName "kube-api-access-7mbhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.922496 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "573bb757-eff3-426f-a71e-7d1c21f6cf67" (UID: "573bb757-eff3-426f-a71e-7d1c21f6cf67"). InnerVolumeSpecName "nova-cells-global-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.925988 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "573bb757-eff3-426f-a71e-7d1c21f6cf67" (UID: "573bb757-eff3-426f-a71e-7d1c21f6cf67"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.926428 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-inventory" (OuterVolumeSpecName: "inventory") pod "573bb757-eff3-426f-a71e-7d1c21f6cf67" (UID: "573bb757-eff3-426f-a71e-7d1c21f6cf67"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.927953 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "573bb757-eff3-426f-a71e-7d1c21f6cf67" (UID: "573bb757-eff3-426f-a71e-7d1c21f6cf67"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.930460 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "573bb757-eff3-426f-a71e-7d1c21f6cf67" (UID: "573bb757-eff3-426f-a71e-7d1c21f6cf67"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.931920 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "573bb757-eff3-426f-a71e-7d1c21f6cf67" (UID: "573bb757-eff3-426f-a71e-7d1c21f6cf67"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.944865 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "573bb757-eff3-426f-a71e-7d1c21f6cf67" (UID: "573bb757-eff3-426f-a71e-7d1c21f6cf67"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.945314 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "573bb757-eff3-426f-a71e-7d1c21f6cf67" (UID: "573bb757-eff3-426f-a71e-7d1c21f6cf67"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.995506 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.995551 4774 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.995567 4774 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.995580 4774 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.995593 4774 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.995610 4774 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.995621 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.995633 4774 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.995644 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mbhw\" (UniqueName: \"kubernetes.io/projected/573bb757-eff3-426f-a71e-7d1c21f6cf67-kube-api-access-7mbhw\") on node \"crc\" DevicePath \"\"" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.995655 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:20:59 crc kubenswrapper[4774]: I1121 16:20:59.995668 4774 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/573bb757-eff3-426f-a71e-7d1c21f6cf67-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.370709 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" event={"ID":"573bb757-eff3-426f-a71e-7d1c21f6cf67","Type":"ContainerDied","Data":"7f209e3b9c5178fda6942dbc89145601dbd03519d9b8d79fa926a9d2eab8a2db"} Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.370783 4774 pod_container_deletor.go:80] "Container not 
found in pod's containers" containerID="7f209e3b9c5178fda6942dbc89145601dbd03519d9b8d79fa926a9d2eab8a2db" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.370889 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-openstack-cell1-6x4bb" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.479472 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-5whzr"] Nov 21 16:21:00 crc kubenswrapper[4774]: E1121 16:21:00.480437 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="573bb757-eff3-426f-a71e-7d1c21f6cf67" containerName="nova-cell1-openstack-openstack-cell1" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.480464 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="573bb757-eff3-426f-a71e-7d1c21f6cf67" containerName="nova-cell1-openstack-openstack-cell1" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.480742 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="573bb757-eff3-426f-a71e-7d1c21f6cf67" containerName="nova-cell1-openstack-openstack-cell1" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.481752 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.494357 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-5whzr"] Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.494679 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.494689 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.495250 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.495472 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.495634 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.611987 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-inventory\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.612187 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ssh-key\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.612274 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceph\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: 
\"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.612449 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sn6l\" (UniqueName: \"kubernetes.io/projected/01366752-fe13-4f55-be84-723e156bfdb1-kube-api-access-4sn6l\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.612603 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.612774 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.612889 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.612979 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.715251 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-inventory\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.715347 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ssh-key\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.715413 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceph\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " 
pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.715483 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sn6l\" (UniqueName: \"kubernetes.io/projected/01366752-fe13-4f55-be84-723e156bfdb1-kube-api-access-4sn6l\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.715535 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.715599 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.715676 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.715710 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.720485 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-2\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.720674 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceph\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.720873 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-1\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc 
kubenswrapper[4774]: I1121 16:21:00.721722 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-telemetry-combined-ca-bundle\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.722627 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-0\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.726456 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ssh-key\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.726954 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-inventory\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.734867 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sn6l\" (UniqueName: \"kubernetes.io/projected/01366752-fe13-4f55-be84-723e156bfdb1-kube-api-access-4sn6l\") pod \"telemetry-openstack-openstack-cell1-5whzr\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:00 crc kubenswrapper[4774]: I1121 16:21:00.821550 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:21:01 crc kubenswrapper[4774]: I1121 16:21:01.367065 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-openstack-openstack-cell1-5whzr"] Nov 21 16:21:01 crc kubenswrapper[4774]: I1121 16:21:01.375257 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 16:21:02 crc kubenswrapper[4774]: I1121 16:21:02.389884 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-5whzr" event={"ID":"01366752-fe13-4f55-be84-723e156bfdb1","Type":"ContainerStarted","Data":"dfeab0696238e1f0fc85a55d6fb443f28c86e610f25e503ff603636b264daf75"} Nov 21 16:21:02 crc kubenswrapper[4774]: I1121 16:21:02.390209 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-5whzr" event={"ID":"01366752-fe13-4f55-be84-723e156bfdb1","Type":"ContainerStarted","Data":"096dec8298b3d91018c89a712d179711e562ee67da593079d46d36e05c0878bd"} Nov 21 16:21:02 crc kubenswrapper[4774]: I1121 16:21:02.413197 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-openstack-openstack-cell1-5whzr" podStartSLOduration=1.957626544 podStartE2EDuration="2.413182885s" podCreationTimestamp="2025-11-21 16:21:00 +0000 UTC" firstStartedPulling="2025-11-21 16:21:01.375021405 +0000 UTC m=+8252.027220664" lastFinishedPulling="2025-11-21 16:21:01.830577706 +0000 UTC m=+8252.482777005" observedRunningTime="2025-11-21 16:21:02.408363777 +0000 UTC m=+8253.060563056" watchObservedRunningTime="2025-11-21 16:21:02.413182885 +0000 UTC m=+8253.065382144" Nov 21 16:21:05 crc kubenswrapper[4774]: I1121 16:21:05.095345 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:21:05 crc kubenswrapper[4774]: E1121 16:21:05.097151 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:21:17 crc kubenswrapper[4774]: I1121 16:21:17.093423 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:21:17 crc kubenswrapper[4774]: E1121 16:21:17.094273 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:21:28 crc kubenswrapper[4774]: I1121 16:21:28.093386 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:21:28 crc kubenswrapper[4774]: E1121 16:21:28.094262 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:21:40 crc kubenswrapper[4774]: I1121 16:21:40.111542 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:21:40 crc kubenswrapper[4774]: I1121 16:21:40.833515 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"a4044e71c7522d0ed80ebb73f60f7155f02f13c474d29b8f93036898904c139e"} Nov 21 16:22:54 crc kubenswrapper[4774]: I1121 16:22:54.967317 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vvtg8"] Nov 21 16:22:54 crc kubenswrapper[4774]: I1121 16:22:54.970843 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:22:54 crc kubenswrapper[4774]: I1121 16:22:54.991494 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vvtg8"] Nov 21 16:22:55 crc kubenswrapper[4774]: I1121 16:22:55.045916 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1722701d-bd5b-4eca-adfe-4ec89dcee14f-catalog-content\") pod \"certified-operators-vvtg8\" (UID: \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\") " pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:22:55 crc kubenswrapper[4774]: I1121 16:22:55.045990 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgk4g\" (UniqueName: \"kubernetes.io/projected/1722701d-bd5b-4eca-adfe-4ec89dcee14f-kube-api-access-hgk4g\") pod \"certified-operators-vvtg8\" (UID: \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\") " pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:22:55 crc kubenswrapper[4774]: I1121 16:22:55.046285 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1722701d-bd5b-4eca-adfe-4ec89dcee14f-utilities\") pod \"certified-operators-vvtg8\" (UID: \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\") " pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:22:55 crc kubenswrapper[4774]: I1121 16:22:55.148138 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1722701d-bd5b-4eca-adfe-4ec89dcee14f-catalog-content\") pod \"certified-operators-vvtg8\" (UID: \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\") " pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:22:55 crc kubenswrapper[4774]: I1121 16:22:55.148210 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgk4g\" (UniqueName: \"kubernetes.io/projected/1722701d-bd5b-4eca-adfe-4ec89dcee14f-kube-api-access-hgk4g\") pod \"certified-operators-vvtg8\" (UID: \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\") " pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:22:55 crc kubenswrapper[4774]: I1121 16:22:55.148312 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/1722701d-bd5b-4eca-adfe-4ec89dcee14f-utilities\") pod \"certified-operators-vvtg8\" (UID: \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\") " pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:22:55 crc kubenswrapper[4774]: I1121 16:22:55.148801 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1722701d-bd5b-4eca-adfe-4ec89dcee14f-utilities\") pod \"certified-operators-vvtg8\" (UID: \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\") " pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:22:55 crc kubenswrapper[4774]: I1121 16:22:55.149478 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1722701d-bd5b-4eca-adfe-4ec89dcee14f-catalog-content\") pod \"certified-operators-vvtg8\" (UID: \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\") " pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:22:55 crc kubenswrapper[4774]: I1121 16:22:55.170571 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgk4g\" (UniqueName: \"kubernetes.io/projected/1722701d-bd5b-4eca-adfe-4ec89dcee14f-kube-api-access-hgk4g\") pod \"certified-operators-vvtg8\" (UID: \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\") " pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:22:55 crc kubenswrapper[4774]: I1121 16:22:55.298418 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:22:55 crc kubenswrapper[4774]: I1121 16:22:55.778962 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vvtg8"] Nov 21 16:22:56 crc kubenswrapper[4774]: I1121 16:22:56.788316 4774 generic.go:334] "Generic (PLEG): container finished" podID="1722701d-bd5b-4eca-adfe-4ec89dcee14f" containerID="21221bd0e7a7ed6cb3c3c1039fbe552f844bd99c829486227742f93571b00013" exitCode=0 Nov 21 16:22:56 crc kubenswrapper[4774]: I1121 16:22:56.788428 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvtg8" event={"ID":"1722701d-bd5b-4eca-adfe-4ec89dcee14f","Type":"ContainerDied","Data":"21221bd0e7a7ed6cb3c3c1039fbe552f844bd99c829486227742f93571b00013"} Nov 21 16:22:56 crc kubenswrapper[4774]: I1121 16:22:56.788686 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvtg8" event={"ID":"1722701d-bd5b-4eca-adfe-4ec89dcee14f","Type":"ContainerStarted","Data":"8dbacfc5f199bd607f00ba44f6f7aee2d7ef78bb34f8ab9c5a0ed0a6532740a3"} Nov 21 16:22:57 crc kubenswrapper[4774]: I1121 16:22:57.817185 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvtg8" event={"ID":"1722701d-bd5b-4eca-adfe-4ec89dcee14f","Type":"ContainerStarted","Data":"2f767560483fb5d7373cef4460137b4536a24dc727b74007d7ce39b62c58a821"} Nov 21 16:22:58 crc kubenswrapper[4774]: I1121 16:22:58.827555 4774 generic.go:334] "Generic (PLEG): container finished" podID="1722701d-bd5b-4eca-adfe-4ec89dcee14f" containerID="2f767560483fb5d7373cef4460137b4536a24dc727b74007d7ce39b62c58a821" exitCode=0 Nov 21 16:22:58 crc kubenswrapper[4774]: I1121 16:22:58.827629 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvtg8" event={"ID":"1722701d-bd5b-4eca-adfe-4ec89dcee14f","Type":"ContainerDied","Data":"2f767560483fb5d7373cef4460137b4536a24dc727b74007d7ce39b62c58a821"} Nov 
21 16:22:59 crc kubenswrapper[4774]: I1121 16:22:59.840948 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvtg8" event={"ID":"1722701d-bd5b-4eca-adfe-4ec89dcee14f","Type":"ContainerStarted","Data":"95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09"} Nov 21 16:22:59 crc kubenswrapper[4774]: I1121 16:22:59.868109 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vvtg8" podStartSLOduration=3.450354332 podStartE2EDuration="5.868087952s" podCreationTimestamp="2025-11-21 16:22:54 +0000 UTC" firstStartedPulling="2025-11-21 16:22:56.790536328 +0000 UTC m=+8367.442735587" lastFinishedPulling="2025-11-21 16:22:59.208269928 +0000 UTC m=+8369.860469207" observedRunningTime="2025-11-21 16:22:59.858681083 +0000 UTC m=+8370.510880352" watchObservedRunningTime="2025-11-21 16:22:59.868087952 +0000 UTC m=+8370.520287211" Nov 21 16:23:05 crc kubenswrapper[4774]: I1121 16:23:05.298539 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:23:05 crc kubenswrapper[4774]: I1121 16:23:05.298803 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:23:05 crc kubenswrapper[4774]: I1121 16:23:05.356337 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:23:05 crc kubenswrapper[4774]: I1121 16:23:05.965340 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:23:07 crc kubenswrapper[4774]: I1121 16:23:07.752240 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vvtg8"] Nov 21 16:23:07 crc kubenswrapper[4774]: I1121 16:23:07.926960 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vvtg8" podUID="1722701d-bd5b-4eca-adfe-4ec89dcee14f" containerName="registry-server" containerID="cri-o://95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09" gracePeriod=2 Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.411507 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.491598 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1722701d-bd5b-4eca-adfe-4ec89dcee14f-utilities\") pod \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\" (UID: \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\") " Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.491711 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgk4g\" (UniqueName: \"kubernetes.io/projected/1722701d-bd5b-4eca-adfe-4ec89dcee14f-kube-api-access-hgk4g\") pod \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\" (UID: \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\") " Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.491845 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1722701d-bd5b-4eca-adfe-4ec89dcee14f-catalog-content\") pod \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\" (UID: \"1722701d-bd5b-4eca-adfe-4ec89dcee14f\") " Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.492664 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1722701d-bd5b-4eca-adfe-4ec89dcee14f-utilities" (OuterVolumeSpecName: "utilities") pod "1722701d-bd5b-4eca-adfe-4ec89dcee14f" (UID: "1722701d-bd5b-4eca-adfe-4ec89dcee14f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.499054 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1722701d-bd5b-4eca-adfe-4ec89dcee14f-kube-api-access-hgk4g" (OuterVolumeSpecName: "kube-api-access-hgk4g") pod "1722701d-bd5b-4eca-adfe-4ec89dcee14f" (UID: "1722701d-bd5b-4eca-adfe-4ec89dcee14f"). InnerVolumeSpecName "kube-api-access-hgk4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.555482 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1722701d-bd5b-4eca-adfe-4ec89dcee14f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1722701d-bd5b-4eca-adfe-4ec89dcee14f" (UID: "1722701d-bd5b-4eca-adfe-4ec89dcee14f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.594077 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1722701d-bd5b-4eca-adfe-4ec89dcee14f-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.594111 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgk4g\" (UniqueName: \"kubernetes.io/projected/1722701d-bd5b-4eca-adfe-4ec89dcee14f-kube-api-access-hgk4g\") on node \"crc\" DevicePath \"\"" Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.594124 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1722701d-bd5b-4eca-adfe-4ec89dcee14f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.942397 4774 generic.go:334] "Generic (PLEG): container finished" podID="1722701d-bd5b-4eca-adfe-4ec89dcee14f" containerID="95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09" exitCode=0 Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.942454 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vvtg8" Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.942473 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvtg8" event={"ID":"1722701d-bd5b-4eca-adfe-4ec89dcee14f","Type":"ContainerDied","Data":"95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09"} Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.942934 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvtg8" event={"ID":"1722701d-bd5b-4eca-adfe-4ec89dcee14f","Type":"ContainerDied","Data":"8dbacfc5f199bd607f00ba44f6f7aee2d7ef78bb34f8ab9c5a0ed0a6532740a3"} Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.942953 4774 scope.go:117] "RemoveContainer" containerID="95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09" Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.964189 4774 scope.go:117] "RemoveContainer" containerID="2f767560483fb5d7373cef4460137b4536a24dc727b74007d7ce39b62c58a821" Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.982439 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vvtg8"] Nov 21 16:23:08 crc kubenswrapper[4774]: I1121 16:23:08.993466 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vvtg8"] Nov 21 16:23:09 crc kubenswrapper[4774]: I1121 16:23:09.006423 4774 scope.go:117] "RemoveContainer" containerID="21221bd0e7a7ed6cb3c3c1039fbe552f844bd99c829486227742f93571b00013" Nov 21 16:23:09 crc kubenswrapper[4774]: I1121 16:23:09.041893 4774 scope.go:117] "RemoveContainer" containerID="95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09" Nov 21 16:23:09 crc kubenswrapper[4774]: E1121 16:23:09.042666 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09\": container with ID starting with 95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09 not found: ID does not exist" containerID="95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09" Nov 21 16:23:09 crc kubenswrapper[4774]: I1121 16:23:09.042739 
4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09"} err="failed to get container status \"95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09\": rpc error: code = NotFound desc = could not find container \"95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09\": container with ID starting with 95b81da20fec027ec2fa26f9faa8a833469b88efabc7217a8d7eef992f1a9b09 not found: ID does not exist" Nov 21 16:23:09 crc kubenswrapper[4774]: I1121 16:23:09.042783 4774 scope.go:117] "RemoveContainer" containerID="2f767560483fb5d7373cef4460137b4536a24dc727b74007d7ce39b62c58a821" Nov 21 16:23:09 crc kubenswrapper[4774]: E1121 16:23:09.043531 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f767560483fb5d7373cef4460137b4536a24dc727b74007d7ce39b62c58a821\": container with ID starting with 2f767560483fb5d7373cef4460137b4536a24dc727b74007d7ce39b62c58a821 not found: ID does not exist" containerID="2f767560483fb5d7373cef4460137b4536a24dc727b74007d7ce39b62c58a821" Nov 21 16:23:09 crc kubenswrapper[4774]: I1121 16:23:09.043594 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f767560483fb5d7373cef4460137b4536a24dc727b74007d7ce39b62c58a821"} err="failed to get container status \"2f767560483fb5d7373cef4460137b4536a24dc727b74007d7ce39b62c58a821\": rpc error: code = NotFound desc = could not find container \"2f767560483fb5d7373cef4460137b4536a24dc727b74007d7ce39b62c58a821\": container with ID starting with 2f767560483fb5d7373cef4460137b4536a24dc727b74007d7ce39b62c58a821 not found: ID does not exist" Nov 21 16:23:09 crc kubenswrapper[4774]: I1121 16:23:09.043638 4774 scope.go:117] "RemoveContainer" containerID="21221bd0e7a7ed6cb3c3c1039fbe552f844bd99c829486227742f93571b00013" Nov 21 16:23:09 crc kubenswrapper[4774]: E1121 16:23:09.044104 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21221bd0e7a7ed6cb3c3c1039fbe552f844bd99c829486227742f93571b00013\": container with ID starting with 21221bd0e7a7ed6cb3c3c1039fbe552f844bd99c829486227742f93571b00013 not found: ID does not exist" containerID="21221bd0e7a7ed6cb3c3c1039fbe552f844bd99c829486227742f93571b00013" Nov 21 16:23:09 crc kubenswrapper[4774]: I1121 16:23:09.044143 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21221bd0e7a7ed6cb3c3c1039fbe552f844bd99c829486227742f93571b00013"} err="failed to get container status \"21221bd0e7a7ed6cb3c3c1039fbe552f844bd99c829486227742f93571b00013\": rpc error: code = NotFound desc = could not find container \"21221bd0e7a7ed6cb3c3c1039fbe552f844bd99c829486227742f93571b00013\": container with ID starting with 21221bd0e7a7ed6cb3c3c1039fbe552f844bd99c829486227742f93571b00013 not found: ID does not exist" Nov 21 16:23:10 crc kubenswrapper[4774]: I1121 16:23:10.109606 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1722701d-bd5b-4eca-adfe-4ec89dcee14f" path="/var/lib/kubelet/pods/1722701d-bd5b-4eca-adfe-4ec89dcee14f/volumes" Nov 21 16:23:59 crc kubenswrapper[4774]: I1121 16:23:59.600876 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:23:59 crc kubenswrapper[4774]: I1121 16:23:59.601637 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:24:29 crc kubenswrapper[4774]: I1121 16:24:29.600673 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:24:29 crc kubenswrapper[4774]: I1121 16:24:29.601284 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:24:52 crc kubenswrapper[4774]: I1121 16:24:52.006478 4774 generic.go:334] "Generic (PLEG): container finished" podID="01366752-fe13-4f55-be84-723e156bfdb1" containerID="dfeab0696238e1f0fc85a55d6fb443f28c86e610f25e503ff603636b264daf75" exitCode=0 Nov 21 16:24:52 crc kubenswrapper[4774]: I1121 16:24:52.006544 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-5whzr" event={"ID":"01366752-fe13-4f55-be84-723e156bfdb1","Type":"ContainerDied","Data":"dfeab0696238e1f0fc85a55d6fb443f28c86e610f25e503ff603636b264daf75"} Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.473477 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.606572 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-1\") pod \"01366752-fe13-4f55-be84-723e156bfdb1\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.606712 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4sn6l\" (UniqueName: \"kubernetes.io/projected/01366752-fe13-4f55-be84-723e156bfdb1-kube-api-access-4sn6l\") pod \"01366752-fe13-4f55-be84-723e156bfdb1\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.606757 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-inventory\") pod \"01366752-fe13-4f55-be84-723e156bfdb1\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.606980 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-0\") pod \"01366752-fe13-4f55-be84-723e156bfdb1\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.607036 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ssh-key\") pod \"01366752-fe13-4f55-be84-723e156bfdb1\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.607109 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceph\") pod \"01366752-fe13-4f55-be84-723e156bfdb1\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.607134 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-2\") pod \"01366752-fe13-4f55-be84-723e156bfdb1\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.607165 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-telemetry-combined-ca-bundle\") pod \"01366752-fe13-4f55-be84-723e156bfdb1\" (UID: \"01366752-fe13-4f55-be84-723e156bfdb1\") " Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.613588 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceph" (OuterVolumeSpecName: "ceph") pod "01366752-fe13-4f55-be84-723e156bfdb1" (UID: "01366752-fe13-4f55-be84-723e156bfdb1"). InnerVolumeSpecName "ceph". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.613626 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01366752-fe13-4f55-be84-723e156bfdb1-kube-api-access-4sn6l" (OuterVolumeSpecName: "kube-api-access-4sn6l") pod "01366752-fe13-4f55-be84-723e156bfdb1" (UID: "01366752-fe13-4f55-be84-723e156bfdb1"). InnerVolumeSpecName "kube-api-access-4sn6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.613900 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "01366752-fe13-4f55-be84-723e156bfdb1" (UID: "01366752-fe13-4f55-be84-723e156bfdb1"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.638636 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "01366752-fe13-4f55-be84-723e156bfdb1" (UID: "01366752-fe13-4f55-be84-723e156bfdb1"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.640958 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "01366752-fe13-4f55-be84-723e156bfdb1" (UID: "01366752-fe13-4f55-be84-723e156bfdb1"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.642983 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "01366752-fe13-4f55-be84-723e156bfdb1" (UID: "01366752-fe13-4f55-be84-723e156bfdb1"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.656837 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-inventory" (OuterVolumeSpecName: "inventory") pod "01366752-fe13-4f55-be84-723e156bfdb1" (UID: "01366752-fe13-4f55-be84-723e156bfdb1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.666560 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "01366752-fe13-4f55-be84-723e156bfdb1" (UID: "01366752-fe13-4f55-be84-723e156bfdb1"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.709061 4774 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.709110 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.709122 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.709135 4774 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.709149 4774 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.709161 4774 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.709173 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4sn6l\" (UniqueName: \"kubernetes.io/projected/01366752-fe13-4f55-be84-723e156bfdb1-kube-api-access-4sn6l\") on node \"crc\" DevicePath \"\"" Nov 21 16:24:53 crc kubenswrapper[4774]: I1121 16:24:53.709183 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/01366752-fe13-4f55-be84-723e156bfdb1-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.032147 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-openstack-openstack-cell1-5whzr" event={"ID":"01366752-fe13-4f55-be84-723e156bfdb1","Type":"ContainerDied","Data":"096dec8298b3d91018c89a712d179711e562ee67da593079d46d36e05c0878bd"} Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.032460 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="096dec8298b3d91018c89a712d179711e562ee67da593079d46d36e05c0878bd" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.032248 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-openstack-openstack-cell1-5whzr" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.123808 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-xs7fw"] Nov 21 16:24:54 crc kubenswrapper[4774]: E1121 16:24:54.124381 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01366752-fe13-4f55-be84-723e156bfdb1" containerName="telemetry-openstack-openstack-cell1" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.124406 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="01366752-fe13-4f55-be84-723e156bfdb1" containerName="telemetry-openstack-openstack-cell1" Nov 21 16:24:54 crc kubenswrapper[4774]: E1121 16:24:54.124437 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1722701d-bd5b-4eca-adfe-4ec89dcee14f" containerName="extract-utilities" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.124445 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1722701d-bd5b-4eca-adfe-4ec89dcee14f" containerName="extract-utilities" Nov 21 16:24:54 crc kubenswrapper[4774]: E1121 16:24:54.124465 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1722701d-bd5b-4eca-adfe-4ec89dcee14f" containerName="registry-server" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.124472 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1722701d-bd5b-4eca-adfe-4ec89dcee14f" containerName="registry-server" Nov 21 16:24:54 crc kubenswrapper[4774]: E1121 16:24:54.124491 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1722701d-bd5b-4eca-adfe-4ec89dcee14f" containerName="extract-content" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.124498 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="1722701d-bd5b-4eca-adfe-4ec89dcee14f" containerName="extract-content" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.124746 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="01366752-fe13-4f55-be84-723e156bfdb1" containerName="telemetry-openstack-openstack-cell1" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.124801 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="1722701d-bd5b-4eca-adfe-4ec89dcee14f" containerName="registry-server" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.125788 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.132111 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-sriov-agent-neutron-config" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.132212 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.132503 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.132688 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.135058 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.140651 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-xs7fw"] Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.220879 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.220950 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.221082 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.221152 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.221238 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.221260 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-5ggkq\" (UniqueName: \"kubernetes.io/projected/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-kube-api-access-5ggkq\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.322695 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.322796 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.322875 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.322964 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.322988 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ggkq\" (UniqueName: \"kubernetes.io/projected/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-kube-api-access-5ggkq\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.323054 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.327190 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-ceph\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.327658 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-ssh-key\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: 
\"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.327989 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.328273 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-inventory\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.328506 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.338905 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ggkq\" (UniqueName: \"kubernetes.io/projected/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-kube-api-access-5ggkq\") pod \"neutron-sriov-openstack-openstack-cell1-xs7fw\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") " pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.446586 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" Nov 21 16:24:54 crc kubenswrapper[4774]: I1121 16:24:54.958134 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-sriov-openstack-openstack-cell1-xs7fw"] Nov 21 16:24:55 crc kubenswrapper[4774]: I1121 16:24:55.047589 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" event={"ID":"180963e5-c4a2-40c8-9f16-26ea5b01cfbf","Type":"ContainerStarted","Data":"a420c001a2206189ac67dc5a05193e447190b286c6e0edecabb3fe37fecb2f1c"} Nov 21 16:24:56 crc kubenswrapper[4774]: I1121 16:24:56.061915 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" event={"ID":"180963e5-c4a2-40c8-9f16-26ea5b01cfbf","Type":"ContainerStarted","Data":"7f7ec9f8d09462cc11fd26d84178f37d3a2571ca4596c174f0b473ee1e9b1c8f"} Nov 21 16:24:56 crc kubenswrapper[4774]: I1121 16:24:56.078004 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" podStartSLOduration=1.601721577 podStartE2EDuration="2.077983258s" podCreationTimestamp="2025-11-21 16:24:54 +0000 UTC" firstStartedPulling="2025-11-21 16:24:54.96403426 +0000 UTC m=+8485.616233519" lastFinishedPulling="2025-11-21 16:24:55.440295941 +0000 UTC m=+8486.092495200" observedRunningTime="2025-11-21 16:24:56.076082014 +0000 UTC m=+8486.728281273" watchObservedRunningTime="2025-11-21 16:24:56.077983258 +0000 UTC m=+8486.730182517" Nov 21 16:24:59 crc kubenswrapper[4774]: I1121 16:24:59.600744 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:24:59 crc kubenswrapper[4774]: I1121 16:24:59.601596 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:24:59 crc kubenswrapper[4774]: I1121 16:24:59.601687 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 16:24:59 crc kubenswrapper[4774]: I1121 16:24:59.603108 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a4044e71c7522d0ed80ebb73f60f7155f02f13c474d29b8f93036898904c139e"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 16:24:59 crc kubenswrapper[4774]: I1121 16:24:59.603255 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://a4044e71c7522d0ed80ebb73f60f7155f02f13c474d29b8f93036898904c139e" gracePeriod=600 Nov 21 16:25:00 crc kubenswrapper[4774]: I1121 16:25:00.102474 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" 
containerID="a4044e71c7522d0ed80ebb73f60f7155f02f13c474d29b8f93036898904c139e" exitCode=0 Nov 21 16:25:00 crc kubenswrapper[4774]: I1121 16:25:00.123052 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"a4044e71c7522d0ed80ebb73f60f7155f02f13c474d29b8f93036898904c139e"} Nov 21 16:25:00 crc kubenswrapper[4774]: I1121 16:25:00.123568 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"} Nov 21 16:25:00 crc kubenswrapper[4774]: I1121 16:25:00.123616 4774 scope.go:117] "RemoveContainer" containerID="22b87e40cf403d1d68ef6b93001bde2a7255c6c9d3340d7fb182ccdf51d86d8e" Nov 21 16:26:59 crc kubenswrapper[4774]: I1121 16:26:59.600686 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:26:59 crc kubenswrapper[4774]: I1121 16:26:59.601233 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:27:29 crc kubenswrapper[4774]: I1121 16:27:29.600521 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:27:29 crc kubenswrapper[4774]: I1121 16:27:29.601436 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:27:50 crc kubenswrapper[4774]: I1121 16:27:50.694710 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d7gpk"] Nov 21 16:27:50 crc kubenswrapper[4774]: I1121 16:27:50.700244 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:27:50 crc kubenswrapper[4774]: I1121 16:27:50.724780 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d7gpk"]
Nov 21 16:27:50 crc kubenswrapper[4774]: I1121 16:27:50.846966 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mdxr\" (UniqueName: \"kubernetes.io/projected/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-kube-api-access-6mdxr\") pod \"redhat-marketplace-d7gpk\" (UID: \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\") " pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:27:50 crc kubenswrapper[4774]: I1121 16:27:50.847135 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-utilities\") pod \"redhat-marketplace-d7gpk\" (UID: \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\") " pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:27:50 crc kubenswrapper[4774]: I1121 16:27:50.847204 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-catalog-content\") pod \"redhat-marketplace-d7gpk\" (UID: \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\") " pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:27:50 crc kubenswrapper[4774]: I1121 16:27:50.948911 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mdxr\" (UniqueName: \"kubernetes.io/projected/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-kube-api-access-6mdxr\") pod \"redhat-marketplace-d7gpk\" (UID: \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\") " pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:27:50 crc kubenswrapper[4774]: I1121 16:27:50.949049 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-utilities\") pod \"redhat-marketplace-d7gpk\" (UID: \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\") " pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:27:50 crc kubenswrapper[4774]: I1121 16:27:50.949088 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-catalog-content\") pod \"redhat-marketplace-d7gpk\" (UID: \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\") " pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:27:50 crc kubenswrapper[4774]: I1121 16:27:50.949515 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-utilities\") pod \"redhat-marketplace-d7gpk\" (UID: \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\") " pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:27:50 crc kubenswrapper[4774]: I1121 16:27:50.949637 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-catalog-content\") pod \"redhat-marketplace-d7gpk\" (UID: \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\") " pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:27:50 crc kubenswrapper[4774]: I1121 16:27:50.969779 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mdxr\" (UniqueName: \"kubernetes.io/projected/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-kube-api-access-6mdxr\") pod \"redhat-marketplace-d7gpk\" (UID: \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\") " pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:27:51 crc kubenswrapper[4774]: I1121 16:27:51.029899 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:27:51 crc kubenswrapper[4774]: I1121 16:27:51.498254 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d7gpk"]
Nov 21 16:27:51 crc kubenswrapper[4774]: W1121 16:27:51.507490 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9eb31c25_e944_4e18_ae4c_1aa62c3c7c10.slice/crio-72a52280aa704dc8bd11daf148668d881740f7ec16e53520f006dce359aca174 WatchSource:0}: Error finding container 72a52280aa704dc8bd11daf148668d881740f7ec16e53520f006dce359aca174: Status 404 returned error can't find the container with id 72a52280aa704dc8bd11daf148668d881740f7ec16e53520f006dce359aca174
Nov 21 16:27:51 crc kubenswrapper[4774]: I1121 16:27:51.965346 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d7gpk" event={"ID":"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10","Type":"ContainerStarted","Data":"72a52280aa704dc8bd11daf148668d881740f7ec16e53520f006dce359aca174"}
Nov 21 16:27:52 crc kubenswrapper[4774]: I1121 16:27:52.977402 4774 generic.go:334] "Generic (PLEG): container finished" podID="9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" containerID="6414ee3eec40d57f719e596d0aca503450eca07b12a0cc735e2e5748becad9d0" exitCode=0
Nov 21 16:27:52 crc kubenswrapper[4774]: I1121 16:27:52.977479 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d7gpk" event={"ID":"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10","Type":"ContainerDied","Data":"6414ee3eec40d57f719e596d0aca503450eca07b12a0cc735e2e5748becad9d0"}
Nov 21 16:27:52 crc kubenswrapper[4774]: I1121 16:27:52.979909 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 21 16:27:53 crc kubenswrapper[4774]: I1121 16:27:53.993092 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d7gpk" event={"ID":"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10","Type":"ContainerStarted","Data":"022a39157863e8d16af7c2badbc46ed2514201d0ebec0b927598f24736bf1203"}
Nov 21 16:27:55 crc kubenswrapper[4774]: I1121 16:27:55.004681 4774 generic.go:334] "Generic (PLEG): container finished" podID="9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" containerID="022a39157863e8d16af7c2badbc46ed2514201d0ebec0b927598f24736bf1203" exitCode=0
Nov 21 16:27:55 crc kubenswrapper[4774]: I1121 16:27:55.004750 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d7gpk" event={"ID":"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10","Type":"ContainerDied","Data":"022a39157863e8d16af7c2badbc46ed2514201d0ebec0b927598f24736bf1203"}
Nov 21 16:27:56 crc kubenswrapper[4774]: I1121 16:27:56.023817 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d7gpk" event={"ID":"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10","Type":"ContainerStarted","Data":"e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc"}
Nov 21 16:27:56 crc kubenswrapper[4774]: I1121 16:27:56.044243 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d7gpk" podStartSLOduration=3.61540297 podStartE2EDuration="6.044223341s" podCreationTimestamp="2025-11-21 16:27:50 +0000 UTC" firstStartedPulling="2025-11-21 16:27:52.979543515 +0000 UTC m=+8663.631742784" lastFinishedPulling="2025-11-21 16:27:55.408363896 +0000 UTC m=+8666.060563155" observedRunningTime="2025-11-21 16:27:56.041300158 +0000 UTC m=+8666.693499457" watchObservedRunningTime="2025-11-21 16:27:56.044223341 +0000 UTC m=+8666.696422600"
Nov 21 16:27:59 crc kubenswrapper[4774]: I1121 16:27:59.601082 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 16:27:59 crc kubenswrapper[4774]: I1121 16:27:59.601596 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 16:27:59 crc kubenswrapper[4774]: I1121 16:27:59.601637 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb"
Nov 21 16:27:59 crc kubenswrapper[4774]: I1121 16:27:59.602544 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 21 16:27:59 crc kubenswrapper[4774]: I1121 16:27:59.602712 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848" gracePeriod=600
Nov 21 16:28:00 crc kubenswrapper[4774]: I1121 16:28:00.067840 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"}
Nov 21 16:28:00 crc kubenswrapper[4774]: I1121 16:28:00.067901 4774 scope.go:117] "RemoveContainer" containerID="a4044e71c7522d0ed80ebb73f60f7155f02f13c474d29b8f93036898904c139e"
Nov 21 16:28:00 crc kubenswrapper[4774]: I1121 16:28:00.067807 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848" exitCode=0
Nov 21 16:28:00 crc kubenswrapper[4774]: E1121 16:28:00.228465 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:28:01 crc kubenswrapper[4774]: I1121 16:28:01.030518 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:28:01 crc kubenswrapper[4774]: I1121 16:28:01.030734 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:28:01 crc kubenswrapper[4774]: I1121 16:28:01.083072 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:28:01 crc kubenswrapper[4774]: E1121 16:28:01.083444 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:28:01 crc kubenswrapper[4774]: I1121 16:28:01.083749 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:28:01 crc kubenswrapper[4774]: I1121 16:28:01.153245 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:28:01 crc kubenswrapper[4774]: I1121 16:28:01.353761 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d7gpk"]
Nov 21 16:28:03 crc kubenswrapper[4774]: I1121 16:28:03.099934 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d7gpk" podUID="9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" containerName="registry-server" containerID="cri-o://e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc" gracePeriod=2
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.094406 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.121085 4774 generic.go:334] "Generic (PLEG): container finished" podID="9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" containerID="e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc" exitCode=0
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.121129 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d7gpk" event={"ID":"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10","Type":"ContainerDied","Data":"e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc"}
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.121155 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d7gpk" event={"ID":"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10","Type":"ContainerDied","Data":"72a52280aa704dc8bd11daf148668d881740f7ec16e53520f006dce359aca174"}
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.121171 4774 scope.go:117] "RemoveContainer" containerID="e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc"
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.121278 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d7gpk"
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.145379 4774 scope.go:117] "RemoveContainer" containerID="022a39157863e8d16af7c2badbc46ed2514201d0ebec0b927598f24736bf1203"
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.164417 4774 scope.go:117] "RemoveContainer" containerID="6414ee3eec40d57f719e596d0aca503450eca07b12a0cc735e2e5748becad9d0"
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.231580 4774 scope.go:117] "RemoveContainer" containerID="e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc"
Nov 21 16:28:04 crc kubenswrapper[4774]: E1121 16:28:04.232197 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc\": container with ID starting with e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc not found: ID does not exist" containerID="e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc"
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.232238 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc"} err="failed to get container status \"e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc\": rpc error: code = NotFound desc = could not find container \"e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc\": container with ID starting with e27a08447d6899319dd0787d78d461cff105dc7731c854db9fe786c3673bdacc not found: ID does not exist"
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.232269 4774 scope.go:117] "RemoveContainer" containerID="022a39157863e8d16af7c2badbc46ed2514201d0ebec0b927598f24736bf1203"
Nov 21 16:28:04 crc kubenswrapper[4774]: E1121 16:28:04.232671 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"022a39157863e8d16af7c2badbc46ed2514201d0ebec0b927598f24736bf1203\": container with ID starting with 022a39157863e8d16af7c2badbc46ed2514201d0ebec0b927598f24736bf1203 not found: ID does not exist" containerID="022a39157863e8d16af7c2badbc46ed2514201d0ebec0b927598f24736bf1203"
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.232722 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"022a39157863e8d16af7c2badbc46ed2514201d0ebec0b927598f24736bf1203"} err="failed to get container status \"022a39157863e8d16af7c2badbc46ed2514201d0ebec0b927598f24736bf1203\": rpc error: code = NotFound desc = could not find container \"022a39157863e8d16af7c2badbc46ed2514201d0ebec0b927598f24736bf1203\": container with ID starting with 022a39157863e8d16af7c2badbc46ed2514201d0ebec0b927598f24736bf1203 not found: ID does not exist"
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.232742 4774 scope.go:117] "RemoveContainer" containerID="6414ee3eec40d57f719e596d0aca503450eca07b12a0cc735e2e5748becad9d0"
Nov 21 16:28:04 crc kubenswrapper[4774]: E1121 16:28:04.233107 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6414ee3eec40d57f719e596d0aca503450eca07b12a0cc735e2e5748becad9d0\": container with ID starting with 6414ee3eec40d57f719e596d0aca503450eca07b12a0cc735e2e5748becad9d0 not found: ID does not exist" containerID="6414ee3eec40d57f719e596d0aca503450eca07b12a0cc735e2e5748becad9d0"
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.233157 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6414ee3eec40d57f719e596d0aca503450eca07b12a0cc735e2e5748becad9d0"} err="failed to get container status \"6414ee3eec40d57f719e596d0aca503450eca07b12a0cc735e2e5748becad9d0\": rpc error: code = NotFound desc = could not find container \"6414ee3eec40d57f719e596d0aca503450eca07b12a0cc735e2e5748becad9d0\": container with ID starting with 6414ee3eec40d57f719e596d0aca503450eca07b12a0cc735e2e5748becad9d0 not found: ID does not exist"
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.260113 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-utilities\") pod \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\" (UID: \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\") "
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.260708 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mdxr\" (UniqueName: \"kubernetes.io/projected/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-kube-api-access-6mdxr\") pod \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\" (UID: \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\") "
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.260807 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-catalog-content\") pod \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\" (UID: \"9eb31c25-e944-4e18-ae4c-1aa62c3c7c10\") "
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.261570 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-utilities" (OuterVolumeSpecName: "utilities") pod "9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" (UID: "9eb31c25-e944-4e18-ae4c-1aa62c3c7c10"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.261844 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.268689 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-kube-api-access-6mdxr" (OuterVolumeSpecName: "kube-api-access-6mdxr") pod "9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" (UID: "9eb31c25-e944-4e18-ae4c-1aa62c3c7c10"). InnerVolumeSpecName "kube-api-access-6mdxr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.282917 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" (UID: "9eb31c25-e944-4e18-ae4c-1aa62c3c7c10"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.363792 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mdxr\" (UniqueName: \"kubernetes.io/projected/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-kube-api-access-6mdxr\") on node \"crc\" DevicePath \"\""
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.363880 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.461528 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d7gpk"]
Nov 21 16:28:04 crc kubenswrapper[4774]: I1121 16:28:04.471763 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d7gpk"]
Nov 21 16:28:06 crc kubenswrapper[4774]: I1121 16:28:06.108414 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" path="/var/lib/kubelet/pods/9eb31c25-e944-4e18-ae4c-1aa62c3c7c10/volumes"
Nov 21 16:28:13 crc kubenswrapper[4774]: I1121 16:28:13.093342 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:28:13 crc kubenswrapper[4774]: E1121 16:28:13.094243 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:28:27 crc kubenswrapper[4774]: I1121 16:28:27.093674 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:28:27 crc kubenswrapper[4774]: E1121 16:28:27.094431 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:28:42 crc kubenswrapper[4774]: I1121 16:28:42.093325 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:28:42 crc kubenswrapper[4774]: E1121 16:28:42.094212 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:28:57 crc kubenswrapper[4774]: I1121 16:28:57.093649 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:28:57 crc kubenswrapper[4774]: E1121 16:28:57.094317 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:28:59 crc kubenswrapper[4774]: I1121 16:28:59.696710 4774 generic.go:334] "Generic (PLEG): container finished" podID="180963e5-c4a2-40c8-9f16-26ea5b01cfbf" containerID="7f7ec9f8d09462cc11fd26d84178f37d3a2571ca4596c174f0b473ee1e9b1c8f" exitCode=0
Nov 21 16:28:59 crc kubenswrapper[4774]: I1121 16:28:59.696814 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" event={"ID":"180963e5-c4a2-40c8-9f16-26ea5b01cfbf","Type":"ContainerDied","Data":"7f7ec9f8d09462cc11fd26d84178f37d3a2571ca4596c174f0b473ee1e9b1c8f"}
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.657736 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.721310 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw" event={"ID":"180963e5-c4a2-40c8-9f16-26ea5b01cfbf","Type":"ContainerDied","Data":"a420c001a2206189ac67dc5a05193e447190b286c6e0edecabb3fe37fecb2f1c"}
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.721361 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a420c001a2206189ac67dc5a05193e447190b286c6e0edecabb3fe37fecb2f1c"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.721417 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-sriov-openstack-openstack-cell1-xs7fw"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.743980 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-neutron-sriov-agent-neutron-config-0\") pod \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") "
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.744119 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-ceph\") pod \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") "
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.744157 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-ssh-key\") pod \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") "
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.744258 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ggkq\" (UniqueName: \"kubernetes.io/projected/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-kube-api-access-5ggkq\") pod \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") "
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.744292 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-inventory\") pod \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") "
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.744338 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-neutron-sriov-combined-ca-bundle\") pod \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\" (UID: \"180963e5-c4a2-40c8-9f16-26ea5b01cfbf\") "
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.752839 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-kube-api-access-5ggkq" (OuterVolumeSpecName: "kube-api-access-5ggkq") pod "180963e5-c4a2-40c8-9f16-26ea5b01cfbf" (UID: "180963e5-c4a2-40c8-9f16-26ea5b01cfbf"). InnerVolumeSpecName "kube-api-access-5ggkq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.753379 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "180963e5-c4a2-40c8-9f16-26ea5b01cfbf" (UID: "180963e5-c4a2-40c8-9f16-26ea5b01cfbf"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.768009 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-ceph" (OuterVolumeSpecName: "ceph") pod "180963e5-c4a2-40c8-9f16-26ea5b01cfbf" (UID: "180963e5-c4a2-40c8-9f16-26ea5b01cfbf"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.791050 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "180963e5-c4a2-40c8-9f16-26ea5b01cfbf" (UID: "180963e5-c4a2-40c8-9f16-26ea5b01cfbf"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.795031 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-inventory" (OuterVolumeSpecName: "inventory") pod "180963e5-c4a2-40c8-9f16-26ea5b01cfbf" (UID: "180963e5-c4a2-40c8-9f16-26ea5b01cfbf"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.824462 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "180963e5-c4a2-40c8-9f16-26ea5b01cfbf" (UID: "180963e5-c4a2-40c8-9f16-26ea5b01cfbf"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.836764 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"]
Nov 21 16:29:01 crc kubenswrapper[4774]: E1121 16:29:01.837253 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" containerName="extract-content"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.837272 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" containerName="extract-content"
Nov 21 16:29:01 crc kubenswrapper[4774]: E1121 16:29:01.838365 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="180963e5-c4a2-40c8-9f16-26ea5b01cfbf" containerName="neutron-sriov-openstack-openstack-cell1"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.838381 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="180963e5-c4a2-40c8-9f16-26ea5b01cfbf" containerName="neutron-sriov-openstack-openstack-cell1"
Nov 21 16:29:01 crc kubenswrapper[4774]: E1121 16:29:01.838458 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" containerName="extract-utilities"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.838467 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" containerName="extract-utilities"
Nov 21 16:29:01 crc kubenswrapper[4774]: E1121 16:29:01.838505 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" containerName="registry-server"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.838511 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" containerName="registry-server"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.838779 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="9eb31c25-e944-4e18-ae4c-1aa62c3c7c10" containerName="registry-server"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.838804 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="180963e5-c4a2-40c8-9f16-26ea5b01cfbf" containerName="neutron-sriov-openstack-openstack-cell1"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.841644 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.844578 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-dhcp-agent-neutron-config"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.846722 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ggkq\" (UniqueName: \"kubernetes.io/projected/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-kube-api-access-5ggkq\") on node \"crc\" DevicePath \"\""
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.847437 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-inventory\") on node \"crc\" DevicePath \"\""
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.847515 4774 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.847589 4774 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\""
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.847647 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-ceph\") on node \"crc\" DevicePath \"\""
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.847702 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/180963e5-c4a2-40c8-9f16-26ea5b01cfbf-ssh-key\") on node \"crc\" DevicePath \"\""
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.856653 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"]
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.949935 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.950038 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzr6q\" (UniqueName: \"kubernetes.io/projected/56441432-7ab5-47a5-85fc-d584e665625e-kube-api-access-wzr6q\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.950250 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.950348 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.950451 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:01 crc kubenswrapper[4774]: I1121 16:29:01.950510 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.052892 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.052990 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.053058 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.053092 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.053152 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.053208 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzr6q\" (UniqueName: \"kubernetes.io/projected/56441432-7ab5-47a5-85fc-d584e665625e-kube-api-access-wzr6q\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.059044 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-ceph\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.059162 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-inventory\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.059875 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-ssh-key\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.060191 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.060472 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.074302 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzr6q\" (UniqueName: \"kubernetes.io/projected/56441432-7ab5-47a5-85fc-d584e665625e-kube-api-access-wzr6q\") pod \"neutron-dhcp-openstack-openstack-cell1-8l9vq\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.205759 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"
Nov 21 16:29:02 crc kubenswrapper[4774]: I1121 16:29:02.771180 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq"]
Nov 21 16:29:03 crc kubenswrapper[4774]: I1121 16:29:03.752044 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq" event={"ID":"56441432-7ab5-47a5-85fc-d584e665625e","Type":"ContainerStarted","Data":"e661ed6e339b1db911e0c745345fa574f9c83509ddf5274494c865220afa427f"}
Nov 21 16:29:04 crc kubenswrapper[4774]: I1121 16:29:04.764973 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq" event={"ID":"56441432-7ab5-47a5-85fc-d584e665625e","Type":"ContainerStarted","Data":"1aee94be8b78b49b8b277a08612df38d781ec48198585a49d0633032c583a075"}
Nov 21 16:29:04 crc kubenswrapper[4774]: I1121 16:29:04.796191 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq" podStartSLOduration=3.330810972 podStartE2EDuration="3.796173061s" podCreationTimestamp="2025-11-21 16:29:01 +0000 UTC" firstStartedPulling="2025-11-21 16:29:02.77995156 +0000 UTC m=+8733.432150859" lastFinishedPulling="2025-11-21 16:29:03.245313689 +0000 UTC m=+8733.897512948" observedRunningTime="2025-11-21 16:29:04.785161346 +0000 UTC m=+8735.437360645" watchObservedRunningTime="2025-11-21 16:29:04.796173061 +0000 UTC m=+8735.448372310"
Nov 21 16:29:11 crc kubenswrapper[4774]: I1121 16:29:11.094048 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:29:11 crc kubenswrapper[4774]: E1121 16:29:11.095426 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:29:23 crc kubenswrapper[4774]: I1121 16:29:23.093730 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:29:23 crc kubenswrapper[4774]: E1121 16:29:23.094987 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:29:36 crc kubenswrapper[4774]: I1121 16:29:36.093854 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:29:36 crc kubenswrapper[4774]: E1121 16:29:36.095561 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:29:51 crc kubenswrapper[4774]: I1121 16:29:51.094338 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:29:51 crc kubenswrapper[4774]: E1121 16:29:51.095336 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.149910 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"]
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.153255 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.155859 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.155949 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.161904 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"]
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.273267 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89ed5197-f2e3-4dc3-9e36-85a813304545-config-volume\") pod \"collect-profiles-29395710-9g7fc\" (UID: \"89ed5197-f2e3-4dc3-9e36-85a813304545\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.273572 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vzk5\" (UniqueName: \"kubernetes.io/projected/89ed5197-f2e3-4dc3-9e36-85a813304545-kube-api-access-6vzk5\") pod \"collect-profiles-29395710-9g7fc\" (UID: \"89ed5197-f2e3-4dc3-9e36-85a813304545\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.273809 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89ed5197-f2e3-4dc3-9e36-85a813304545-secret-volume\") pod \"collect-profiles-29395710-9g7fc\" (UID: \"89ed5197-f2e3-4dc3-9e36-85a813304545\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.375533 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89ed5197-f2e3-4dc3-9e36-85a813304545-secret-volume\") pod \"collect-profiles-29395710-9g7fc\" (UID: \"89ed5197-f2e3-4dc3-9e36-85a813304545\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.375747 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89ed5197-f2e3-4dc3-9e36-85a813304545-config-volume\") pod \"collect-profiles-29395710-9g7fc\" (UID: \"89ed5197-f2e3-4dc3-9e36-85a813304545\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.375833 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vzk5\" (UniqueName: \"kubernetes.io/projected/89ed5197-f2e3-4dc3-9e36-85a813304545-kube-api-access-6vzk5\") pod \"collect-profiles-29395710-9g7fc\" (UID: \"89ed5197-f2e3-4dc3-9e36-85a813304545\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.377252 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89ed5197-f2e3-4dc3-9e36-85a813304545-config-volume\") pod \"collect-profiles-29395710-9g7fc\" (UID: \"89ed5197-f2e3-4dc3-9e36-85a813304545\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.382135 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89ed5197-f2e3-4dc3-9e36-85a813304545-secret-volume\") pod \"collect-profiles-29395710-9g7fc\" (UID: \"89ed5197-f2e3-4dc3-9e36-85a813304545\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.391540 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vzk5\" (UniqueName: \"kubernetes.io/projected/89ed5197-f2e3-4dc3-9e36-85a813304545-kube-api-access-6vzk5\") pod \"collect-profiles-29395710-9g7fc\" (UID: \"89ed5197-f2e3-4dc3-9e36-85a813304545\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.474122 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:00 crc kubenswrapper[4774]: I1121 16:30:00.945898 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"]
Nov 21 16:30:01 crc kubenswrapper[4774]: I1121 16:30:01.407046 4774 generic.go:334] "Generic (PLEG): container finished" podID="89ed5197-f2e3-4dc3-9e36-85a813304545" containerID="9f584a1f595e7bc4377777e293f5d48ada7a7c09d52bca0142df37736a6c241b" exitCode=0
Nov 21 16:30:01 crc kubenswrapper[4774]: I1121 16:30:01.407114 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc" event={"ID":"89ed5197-f2e3-4dc3-9e36-85a813304545","Type":"ContainerDied","Data":"9f584a1f595e7bc4377777e293f5d48ada7a7c09d52bca0142df37736a6c241b"}
Nov 21 16:30:01 crc kubenswrapper[4774]: I1121 16:30:01.407377 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc" event={"ID":"89ed5197-f2e3-4dc3-9e36-85a813304545","Type":"ContainerStarted","Data":"bec291d60d4868b5fdb16b38ec8cf1db7194e9f8982c9b2b541c6f4bf4295eb4"}
Nov 21 16:30:02 crc kubenswrapper[4774]: I1121 16:30:02.094039 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:30:02 crc kubenswrapper[4774]: E1121 16:30:02.094447 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:30:02 crc kubenswrapper[4774]: I1121 16:30:02.813720 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:02 crc kubenswrapper[4774]: I1121 16:30:02.929941 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vzk5\" (UniqueName: \"kubernetes.io/projected/89ed5197-f2e3-4dc3-9e36-85a813304545-kube-api-access-6vzk5\") pod \"89ed5197-f2e3-4dc3-9e36-85a813304545\" (UID: \"89ed5197-f2e3-4dc3-9e36-85a813304545\") "
Nov 21 16:30:02 crc kubenswrapper[4774]: I1121 16:30:02.930253 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89ed5197-f2e3-4dc3-9e36-85a813304545-secret-volume\") pod \"89ed5197-f2e3-4dc3-9e36-85a813304545\" (UID: \"89ed5197-f2e3-4dc3-9e36-85a813304545\") "
Nov 21 16:30:02 crc kubenswrapper[4774]: I1121 16:30:02.930300 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89ed5197-f2e3-4dc3-9e36-85a813304545-config-volume\") pod \"89ed5197-f2e3-4dc3-9e36-85a813304545\" (UID: \"89ed5197-f2e3-4dc3-9e36-85a813304545\") "
Nov 21 16:30:02 crc kubenswrapper[4774]: I1121 16:30:02.931584 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89ed5197-f2e3-4dc3-9e36-85a813304545-config-volume" (OuterVolumeSpecName: "config-volume") pod "89ed5197-f2e3-4dc3-9e36-85a813304545" (UID: "89ed5197-f2e3-4dc3-9e36-85a813304545"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 21 16:30:02 crc kubenswrapper[4774]: I1121 16:30:02.941982 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89ed5197-f2e3-4dc3-9e36-85a813304545-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "89ed5197-f2e3-4dc3-9e36-85a813304545" (UID: "89ed5197-f2e3-4dc3-9e36-85a813304545"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 16:30:02 crc kubenswrapper[4774]: I1121 16:30:02.958151 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89ed5197-f2e3-4dc3-9e36-85a813304545-kube-api-access-6vzk5" (OuterVolumeSpecName: "kube-api-access-6vzk5") pod "89ed5197-f2e3-4dc3-9e36-85a813304545" (UID: "89ed5197-f2e3-4dc3-9e36-85a813304545"). InnerVolumeSpecName "kube-api-access-6vzk5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 16:30:03 crc kubenswrapper[4774]: I1121 16:30:03.032778 4774 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/89ed5197-f2e3-4dc3-9e36-85a813304545-secret-volume\") on node \"crc\" DevicePath \"\""
Nov 21 16:30:03 crc kubenswrapper[4774]: I1121 16:30:03.032908 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/89ed5197-f2e3-4dc3-9e36-85a813304545-config-volume\") on node \"crc\" DevicePath \"\""
Nov 21 16:30:03 crc kubenswrapper[4774]: I1121 16:30:03.032923 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vzk5\" (UniqueName: \"kubernetes.io/projected/89ed5197-f2e3-4dc3-9e36-85a813304545-kube-api-access-6vzk5\") on node \"crc\" DevicePath \"\""
Nov 21 16:30:03 crc kubenswrapper[4774]: I1121 16:30:03.431013 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc" event={"ID":"89ed5197-f2e3-4dc3-9e36-85a813304545","Type":"ContainerDied","Data":"bec291d60d4868b5fdb16b38ec8cf1db7194e9f8982c9b2b541c6f4bf4295eb4"}
Nov 21 16:30:03 crc kubenswrapper[4774]: I1121 16:30:03.431060 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bec291d60d4868b5fdb16b38ec8cf1db7194e9f8982c9b2b541c6f4bf4295eb4"
Nov 21 16:30:03 crc kubenswrapper[4774]: I1121 16:30:03.431417 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395710-9g7fc"
Nov 21 16:30:03 crc kubenswrapper[4774]: I1121 16:30:03.920032 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"]
Nov 21 16:30:03 crc kubenswrapper[4774]: I1121 16:30:03.924410 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395665-6c56p"]
Nov 21 16:30:04 crc kubenswrapper[4774]: I1121 16:30:04.116715 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f72fcfa5-b81f-448f-8aa5-0134d627c92b" path="/var/lib/kubelet/pods/f72fcfa5-b81f-448f-8aa5-0134d627c92b/volumes"
Nov 21 16:30:17 crc kubenswrapper[4774]: I1121 16:30:17.093512 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:30:17 crc kubenswrapper[4774]: E1121 16:30:17.094294 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:30:32 crc kubenswrapper[4774]: I1121 16:30:32.094237 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:30:32 crc kubenswrapper[4774]: E1121 16:30:32.095344 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:30:47 crc kubenswrapper[4774]: I1121 16:30:47.093746 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:30:47 crc kubenswrapper[4774]: E1121 16:30:47.095660 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:30:54 crc kubenswrapper[4774]: I1121 16:30:54.014732 4774 scope.go:117] "RemoveContainer" containerID="8f765fb1a65ddb9f4235e669113327c96ddc6eb87e97f4bbc795be02a5234508"
Nov 21 16:31:01 crc kubenswrapper[4774]: I1121 16:31:01.092809 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:31:01 crc kubenswrapper[4774]: E1121 16:31:01.093709 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:31:12 crc kubenswrapper[4774]: I1121 16:31:12.093572 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:31:12 crc kubenswrapper[4774]: E1121 16:31:12.094599 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:31:25 crc kubenswrapper[4774]: I1121 16:31:25.095936 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:31:25 crc kubenswrapper[4774]: E1121 16:31:25.097029 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:31:40 crc kubenswrapper[4774]: I1121 16:31:40.102534 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:31:40 crc kubenswrapper[4774]: E1121 16:31:40.103860 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:31:51 crc kubenswrapper[4774]: I1121 16:31:51.094176 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848"
Nov 21 16:31:51 crc kubenswrapper[4774]: E1121 16:31:51.095110 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.173580 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kkc4f"]
Nov 21 16:31:52 crc kubenswrapper[4774]: E1121 16:31:52.174245 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89ed5197-f2e3-4dc3-9e36-85a813304545" containerName="collect-profiles"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.174266 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="89ed5197-f2e3-4dc3-9e36-85a813304545" containerName="collect-profiles"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.174644 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="89ed5197-f2e3-4dc3-9e36-85a813304545" containerName="collect-profiles"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.177034 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kkc4f"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.186371 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kkc4f"]
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.284340 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq4pf\" (UniqueName: \"kubernetes.io/projected/c3467477-e5f5-45ef-9910-a109cf5f73b7-kube-api-access-cq4pf\") pod \"community-operators-kkc4f\" (UID: \"c3467477-e5f5-45ef-9910-a109cf5f73b7\") " pod="openshift-marketplace/community-operators-kkc4f"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.284984 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3467477-e5f5-45ef-9910-a109cf5f73b7-catalog-content\") pod \"community-operators-kkc4f\" (UID: \"c3467477-e5f5-45ef-9910-a109cf5f73b7\") " pod="openshift-marketplace/community-operators-kkc4f"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.285046 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3467477-e5f5-45ef-9910-a109cf5f73b7-utilities\") pod \"community-operators-kkc4f\" (UID: \"c3467477-e5f5-45ef-9910-a109cf5f73b7\") " pod="openshift-marketplace/community-operators-kkc4f"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.386948 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3467477-e5f5-45ef-9910-a109cf5f73b7-catalog-content\") pod \"community-operators-kkc4f\" (UID: \"c3467477-e5f5-45ef-9910-a109cf5f73b7\") " pod="openshift-marketplace/community-operators-kkc4f"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.387063 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3467477-e5f5-45ef-9910-a109cf5f73b7-utilities\") pod \"community-operators-kkc4f\" (UID: \"c3467477-e5f5-45ef-9910-a109cf5f73b7\") " pod="openshift-marketplace/community-operators-kkc4f"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.387599 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3467477-e5f5-45ef-9910-a109cf5f73b7-catalog-content\") pod \"community-operators-kkc4f\" (UID: \"c3467477-e5f5-45ef-9910-a109cf5f73b7\") " pod="openshift-marketplace/community-operators-kkc4f"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.387621 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3467477-e5f5-45ef-9910-a109cf5f73b7-utilities\") pod \"community-operators-kkc4f\" (UID: \"c3467477-e5f5-45ef-9910-a109cf5f73b7\") " pod="openshift-marketplace/community-operators-kkc4f"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.387955 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq4pf\" (UniqueName: \"kubernetes.io/projected/c3467477-e5f5-45ef-9910-a109cf5f73b7-kube-api-access-cq4pf\") pod \"community-operators-kkc4f\" (UID: \"c3467477-e5f5-45ef-9910-a109cf5f73b7\") " pod="openshift-marketplace/community-operators-kkc4f"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.408855 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq4pf\" (UniqueName: \"kubernetes.io/projected/c3467477-e5f5-45ef-9910-a109cf5f73b7-kube-api-access-cq4pf\") pod \"community-operators-kkc4f\" (UID: \"c3467477-e5f5-45ef-9910-a109cf5f73b7\") " pod="openshift-marketplace/community-operators-kkc4f"
Nov 21 16:31:52 crc kubenswrapper[4774]: I1121 16:31:52.507389 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kkc4f"
Nov 21 16:31:53 crc kubenswrapper[4774]: I1121 16:31:53.058177 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kkc4f"]
Nov 21 16:31:53 crc kubenswrapper[4774]: I1121 16:31:53.630143 4774 generic.go:334] "Generic (PLEG): container finished" podID="c3467477-e5f5-45ef-9910-a109cf5f73b7" containerID="da777d73706c011f516a161a6bce46f59bf6b0add2cdc5ffa8bf1287ab52ad67" exitCode=0
Nov 21 16:31:53 crc kubenswrapper[4774]: I1121 16:31:53.630256 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kkc4f" event={"ID":"c3467477-e5f5-45ef-9910-a109cf5f73b7","Type":"ContainerDied","Data":"da777d73706c011f516a161a6bce46f59bf6b0add2cdc5ffa8bf1287ab52ad67"}
Nov 21 16:31:53 crc kubenswrapper[4774]: I1121 16:31:53.630475 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kkc4f" event={"ID":"c3467477-e5f5-45ef-9910-a109cf5f73b7","Type":"ContainerStarted","Data":"48d81b08634f58a0890f178f2f993df453fbfd6b7d07e6d8ed23a44cdc5c1d43"}
Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.590403 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2mhzc"]
Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.592918 4774 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.600709 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2mhzc"] Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.646773 4774 generic.go:334] "Generic (PLEG): container finished" podID="c3467477-e5f5-45ef-9910-a109cf5f73b7" containerID="14737b05f556dcd384daa5a149e885c57fb82366ea1da46b1fe0eba0369d15d2" exitCode=0 Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.647115 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kkc4f" event={"ID":"c3467477-e5f5-45ef-9910-a109cf5f73b7","Type":"ContainerDied","Data":"14737b05f556dcd384daa5a149e885c57fb82366ea1da46b1fe0eba0369d15d2"} Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.745941 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldkcc\" (UniqueName: \"kubernetes.io/projected/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-kube-api-access-ldkcc\") pod \"redhat-operators-2mhzc\" (UID: \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\") " pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.746287 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-catalog-content\") pod \"redhat-operators-2mhzc\" (UID: \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\") " pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.746686 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-utilities\") pod \"redhat-operators-2mhzc\" (UID: \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\") " pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.849453 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-catalog-content\") pod \"redhat-operators-2mhzc\" (UID: \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\") " pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.849681 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-utilities\") pod \"redhat-operators-2mhzc\" (UID: \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\") " pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.850043 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-catalog-content\") pod \"redhat-operators-2mhzc\" (UID: \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\") " pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.850117 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-utilities\") pod \"redhat-operators-2mhzc\" (UID: \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\") " 
pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.850432 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldkcc\" (UniqueName: \"kubernetes.io/projected/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-kube-api-access-ldkcc\") pod \"redhat-operators-2mhzc\" (UID: \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\") " pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.870589 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldkcc\" (UniqueName: \"kubernetes.io/projected/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-kube-api-access-ldkcc\") pod \"redhat-operators-2mhzc\" (UID: \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\") " pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:31:54 crc kubenswrapper[4774]: I1121 16:31:54.997570 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:31:55 crc kubenswrapper[4774]: I1121 16:31:55.523667 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2mhzc"] Nov 21 16:31:55 crc kubenswrapper[4774]: W1121 16:31:55.531319 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4fe5bb8b_b990_4851_8a1a_3567eb55ae58.slice/crio-fd9cdc12ec0e96b7e8241a7e54e4976c807347c506941a8fc019f49646253359 WatchSource:0}: Error finding container fd9cdc12ec0e96b7e8241a7e54e4976c807347c506941a8fc019f49646253359: Status 404 returned error can't find the container with id fd9cdc12ec0e96b7e8241a7e54e4976c807347c506941a8fc019f49646253359 Nov 21 16:31:55 crc kubenswrapper[4774]: I1121 16:31:55.658170 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kkc4f" event={"ID":"c3467477-e5f5-45ef-9910-a109cf5f73b7","Type":"ContainerStarted","Data":"27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae"} Nov 21 16:31:55 crc kubenswrapper[4774]: I1121 16:31:55.659878 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2mhzc" event={"ID":"4fe5bb8b-b990-4851-8a1a-3567eb55ae58","Type":"ContainerStarted","Data":"fd9cdc12ec0e96b7e8241a7e54e4976c807347c506941a8fc019f49646253359"} Nov 21 16:31:55 crc kubenswrapper[4774]: I1121 16:31:55.690164 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kkc4f" podStartSLOduration=2.263870428 podStartE2EDuration="3.690146779s" podCreationTimestamp="2025-11-21 16:31:52 +0000 UTC" firstStartedPulling="2025-11-21 16:31:53.632128862 +0000 UTC m=+8904.284328121" lastFinishedPulling="2025-11-21 16:31:55.058405213 +0000 UTC m=+8905.710604472" observedRunningTime="2025-11-21 16:31:55.682116599 +0000 UTC m=+8906.334315858" watchObservedRunningTime="2025-11-21 16:31:55.690146779 +0000 UTC m=+8906.342346038" Nov 21 16:31:56 crc kubenswrapper[4774]: I1121 16:31:56.672849 4774 generic.go:334] "Generic (PLEG): container finished" podID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerID="95427184b804b4f387d4962bfbc07c796fe665b044cc449be5a213f607220940" exitCode=0 Nov 21 16:31:56 crc kubenswrapper[4774]: I1121 16:31:56.672986 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2mhzc" 
event={"ID":"4fe5bb8b-b990-4851-8a1a-3567eb55ae58","Type":"ContainerDied","Data":"95427184b804b4f387d4962bfbc07c796fe665b044cc449be5a213f607220940"} Nov 21 16:31:58 crc kubenswrapper[4774]: I1121 16:31:58.694672 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2mhzc" event={"ID":"4fe5bb8b-b990-4851-8a1a-3567eb55ae58","Type":"ContainerStarted","Data":"4ce9858729ba098a81513cb4fe2b80f6bb83a8a6dd99e85cd465320d3845e92d"} Nov 21 16:32:02 crc kubenswrapper[4774]: I1121 16:32:02.507795 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kkc4f" Nov 21 16:32:02 crc kubenswrapper[4774]: I1121 16:32:02.508530 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kkc4f" Nov 21 16:32:02 crc kubenswrapper[4774]: I1121 16:32:02.589682 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kkc4f" Nov 21 16:32:02 crc kubenswrapper[4774]: I1121 16:32:02.742679 4774 generic.go:334] "Generic (PLEG): container finished" podID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerID="4ce9858729ba098a81513cb4fe2b80f6bb83a8a6dd99e85cd465320d3845e92d" exitCode=0 Nov 21 16:32:02 crc kubenswrapper[4774]: I1121 16:32:02.742764 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2mhzc" event={"ID":"4fe5bb8b-b990-4851-8a1a-3567eb55ae58","Type":"ContainerDied","Data":"4ce9858729ba098a81513cb4fe2b80f6bb83a8a6dd99e85cd465320d3845e92d"} Nov 21 16:32:02 crc kubenswrapper[4774]: I1121 16:32:02.795516 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kkc4f" Nov 21 16:32:03 crc kubenswrapper[4774]: I1121 16:32:03.754959 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2mhzc" event={"ID":"4fe5bb8b-b990-4851-8a1a-3567eb55ae58","Type":"ContainerStarted","Data":"b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917"} Nov 21 16:32:03 crc kubenswrapper[4774]: I1121 16:32:03.779411 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2mhzc" podStartSLOduration=3.338604884 podStartE2EDuration="9.779388743s" podCreationTimestamp="2025-11-21 16:31:54 +0000 UTC" firstStartedPulling="2025-11-21 16:31:56.67550411 +0000 UTC m=+8907.327703369" lastFinishedPulling="2025-11-21 16:32:03.116287969 +0000 UTC m=+8913.768487228" observedRunningTime="2025-11-21 16:32:03.772325191 +0000 UTC m=+8914.424524450" watchObservedRunningTime="2025-11-21 16:32:03.779388743 +0000 UTC m=+8914.431588002" Nov 21 16:32:03 crc kubenswrapper[4774]: I1121 16:32:03.831484 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kkc4f"] Nov 21 16:32:04 crc kubenswrapper[4774]: I1121 16:32:04.763873 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kkc4f" podUID="c3467477-e5f5-45ef-9910-a109cf5f73b7" containerName="registry-server" containerID="cri-o://27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae" gracePeriod=2 Nov 21 16:32:04 crc kubenswrapper[4774]: I1121 16:32:04.997794 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:32:04 crc kubenswrapper[4774]: I1121 16:32:04.998115 4774 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.093036 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848" Nov 21 16:32:05 crc kubenswrapper[4774]: E1121 16:32:05.093489 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.287816 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kkc4f" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.417246 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3467477-e5f5-45ef-9910-a109cf5f73b7-catalog-content\") pod \"c3467477-e5f5-45ef-9910-a109cf5f73b7\" (UID: \"c3467477-e5f5-45ef-9910-a109cf5f73b7\") " Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.417389 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq4pf\" (UniqueName: \"kubernetes.io/projected/c3467477-e5f5-45ef-9910-a109cf5f73b7-kube-api-access-cq4pf\") pod \"c3467477-e5f5-45ef-9910-a109cf5f73b7\" (UID: \"c3467477-e5f5-45ef-9910-a109cf5f73b7\") " Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.417586 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3467477-e5f5-45ef-9910-a109cf5f73b7-utilities\") pod \"c3467477-e5f5-45ef-9910-a109cf5f73b7\" (UID: \"c3467477-e5f5-45ef-9910-a109cf5f73b7\") " Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.418320 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3467477-e5f5-45ef-9910-a109cf5f73b7-utilities" (OuterVolumeSpecName: "utilities") pod "c3467477-e5f5-45ef-9910-a109cf5f73b7" (UID: "c3467477-e5f5-45ef-9910-a109cf5f73b7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.418709 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3467477-e5f5-45ef-9910-a109cf5f73b7-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.423777 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3467477-e5f5-45ef-9910-a109cf5f73b7-kube-api-access-cq4pf" (OuterVolumeSpecName: "kube-api-access-cq4pf") pod "c3467477-e5f5-45ef-9910-a109cf5f73b7" (UID: "c3467477-e5f5-45ef-9910-a109cf5f73b7"). InnerVolumeSpecName "kube-api-access-cq4pf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.467384 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3467477-e5f5-45ef-9910-a109cf5f73b7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c3467477-e5f5-45ef-9910-a109cf5f73b7" (UID: "c3467477-e5f5-45ef-9910-a109cf5f73b7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.520439 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3467477-e5f5-45ef-9910-a109cf5f73b7-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.520489 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq4pf\" (UniqueName: \"kubernetes.io/projected/c3467477-e5f5-45ef-9910-a109cf5f73b7-kube-api-access-cq4pf\") on node \"crc\" DevicePath \"\"" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.774223 4774 generic.go:334] "Generic (PLEG): container finished" podID="c3467477-e5f5-45ef-9910-a109cf5f73b7" containerID="27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae" exitCode=0 Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.774271 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kkc4f" event={"ID":"c3467477-e5f5-45ef-9910-a109cf5f73b7","Type":"ContainerDied","Data":"27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae"} Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.774299 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kkc4f" event={"ID":"c3467477-e5f5-45ef-9910-a109cf5f73b7","Type":"ContainerDied","Data":"48d81b08634f58a0890f178f2f993df453fbfd6b7d07e6d8ed23a44cdc5c1d43"} Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.774324 4774 scope.go:117] "RemoveContainer" containerID="27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.774323 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kkc4f" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.795328 4774 scope.go:117] "RemoveContainer" containerID="14737b05f556dcd384daa5a149e885c57fb82366ea1da46b1fe0eba0369d15d2" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.811973 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kkc4f"] Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.818862 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kkc4f"] Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.834502 4774 scope.go:117] "RemoveContainer" containerID="da777d73706c011f516a161a6bce46f59bf6b0add2cdc5ffa8bf1287ab52ad67" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.887279 4774 scope.go:117] "RemoveContainer" containerID="27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae" Nov 21 16:32:05 crc kubenswrapper[4774]: E1121 16:32:05.887742 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae\": container with ID starting with 27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae not found: ID does not exist" containerID="27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.887799 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae"} err="failed to get container status \"27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae\": rpc error: code = NotFound desc = could not find container \"27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae\": container with ID starting with 27e332b613281d4da8f40c9dabe3d9d7abe542b00eebfe0d899459bad2beafae not found: ID does not exist" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.887892 4774 scope.go:117] "RemoveContainer" containerID="14737b05f556dcd384daa5a149e885c57fb82366ea1da46b1fe0eba0369d15d2" Nov 21 16:32:05 crc kubenswrapper[4774]: E1121 16:32:05.888338 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14737b05f556dcd384daa5a149e885c57fb82366ea1da46b1fe0eba0369d15d2\": container with ID starting with 14737b05f556dcd384daa5a149e885c57fb82366ea1da46b1fe0eba0369d15d2 not found: ID does not exist" containerID="14737b05f556dcd384daa5a149e885c57fb82366ea1da46b1fe0eba0369d15d2" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.888380 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14737b05f556dcd384daa5a149e885c57fb82366ea1da46b1fe0eba0369d15d2"} err="failed to get container status \"14737b05f556dcd384daa5a149e885c57fb82366ea1da46b1fe0eba0369d15d2\": rpc error: code = NotFound desc = could not find container \"14737b05f556dcd384daa5a149e885c57fb82366ea1da46b1fe0eba0369d15d2\": container with ID starting with 14737b05f556dcd384daa5a149e885c57fb82366ea1da46b1fe0eba0369d15d2 not found: ID does not exist" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.888408 4774 scope.go:117] "RemoveContainer" containerID="da777d73706c011f516a161a6bce46f59bf6b0add2cdc5ffa8bf1287ab52ad67" Nov 21 16:32:05 crc kubenswrapper[4774]: E1121 16:32:05.888744 4774 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"da777d73706c011f516a161a6bce46f59bf6b0add2cdc5ffa8bf1287ab52ad67\": container with ID starting with da777d73706c011f516a161a6bce46f59bf6b0add2cdc5ffa8bf1287ab52ad67 not found: ID does not exist" containerID="da777d73706c011f516a161a6bce46f59bf6b0add2cdc5ffa8bf1287ab52ad67" Nov 21 16:32:05 crc kubenswrapper[4774]: I1121 16:32:05.888778 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da777d73706c011f516a161a6bce46f59bf6b0add2cdc5ffa8bf1287ab52ad67"} err="failed to get container status \"da777d73706c011f516a161a6bce46f59bf6b0add2cdc5ffa8bf1287ab52ad67\": rpc error: code = NotFound desc = could not find container \"da777d73706c011f516a161a6bce46f59bf6b0add2cdc5ffa8bf1287ab52ad67\": container with ID starting with da777d73706c011f516a161a6bce46f59bf6b0add2cdc5ffa8bf1287ab52ad67 not found: ID does not exist" Nov 21 16:32:06 crc kubenswrapper[4774]: I1121 16:32:06.047127 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2mhzc" podUID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerName="registry-server" probeResult="failure" output=< Nov 21 16:32:06 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 16:32:06 crc kubenswrapper[4774]: > Nov 21 16:32:06 crc kubenswrapper[4774]: I1121 16:32:06.106528 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3467477-e5f5-45ef-9910-a109cf5f73b7" path="/var/lib/kubelet/pods/c3467477-e5f5-45ef-9910-a109cf5f73b7/volumes" Nov 21 16:32:16 crc kubenswrapper[4774]: I1121 16:32:16.052715 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2mhzc" podUID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerName="registry-server" probeResult="failure" output=< Nov 21 16:32:16 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 16:32:16 crc kubenswrapper[4774]: > Nov 21 16:32:16 crc kubenswrapper[4774]: I1121 16:32:16.093866 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848" Nov 21 16:32:16 crc kubenswrapper[4774]: E1121 16:32:16.094180 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:32:26 crc kubenswrapper[4774]: I1121 16:32:26.049171 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2mhzc" podUID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerName="registry-server" probeResult="failure" output=< Nov 21 16:32:26 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 16:32:26 crc kubenswrapper[4774]: > Nov 21 16:32:28 crc kubenswrapper[4774]: I1121 16:32:28.093626 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848" Nov 21 16:32:28 crc kubenswrapper[4774]: E1121 16:32:28.094569 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:32:35 crc kubenswrapper[4774]: I1121 16:32:35.050175 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:32:35 crc kubenswrapper[4774]: I1121 16:32:35.108424 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:32:35 crc kubenswrapper[4774]: I1121 16:32:35.294315 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2mhzc"] Nov 21 16:32:37 crc kubenswrapper[4774]: I1121 16:32:37.080324 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2mhzc" podUID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerName="registry-server" containerID="cri-o://b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917" gracePeriod=2 Nov 21 16:32:37 crc kubenswrapper[4774]: I1121 16:32:37.544663 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:32:37 crc kubenswrapper[4774]: I1121 16:32:37.714998 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-utilities\") pod \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\" (UID: \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\") " Nov 21 16:32:37 crc kubenswrapper[4774]: I1121 16:32:37.715136 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-catalog-content\") pod \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\" (UID: \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\") " Nov 21 16:32:37 crc kubenswrapper[4774]: I1121 16:32:37.715299 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldkcc\" (UniqueName: \"kubernetes.io/projected/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-kube-api-access-ldkcc\") pod \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\" (UID: \"4fe5bb8b-b990-4851-8a1a-3567eb55ae58\") " Nov 21 16:32:37 crc kubenswrapper[4774]: I1121 16:32:37.716213 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-utilities" (OuterVolumeSpecName: "utilities") pod "4fe5bb8b-b990-4851-8a1a-3567eb55ae58" (UID: "4fe5bb8b-b990-4851-8a1a-3567eb55ae58"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:32:37 crc kubenswrapper[4774]: I1121 16:32:37.720626 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-kube-api-access-ldkcc" (OuterVolumeSpecName: "kube-api-access-ldkcc") pod "4fe5bb8b-b990-4851-8a1a-3567eb55ae58" (UID: "4fe5bb8b-b990-4851-8a1a-3567eb55ae58"). InnerVolumeSpecName "kube-api-access-ldkcc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:32:37 crc kubenswrapper[4774]: I1121 16:32:37.819409 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:32:37 crc kubenswrapper[4774]: I1121 16:32:37.819725 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldkcc\" (UniqueName: \"kubernetes.io/projected/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-kube-api-access-ldkcc\") on node \"crc\" DevicePath \"\"" Nov 21 16:32:37 crc kubenswrapper[4774]: I1121 16:32:37.833350 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4fe5bb8b-b990-4851-8a1a-3567eb55ae58" (UID: "4fe5bb8b-b990-4851-8a1a-3567eb55ae58"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:32:37 crc kubenswrapper[4774]: I1121 16:32:37.922250 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4fe5bb8b-b990-4851-8a1a-3567eb55ae58-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.094234 4774 generic.go:334] "Generic (PLEG): container finished" podID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerID="b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917" exitCode=0 Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.094308 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2mhzc" Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.108377 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2mhzc" event={"ID":"4fe5bb8b-b990-4851-8a1a-3567eb55ae58","Type":"ContainerDied","Data":"b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917"} Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.108435 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2mhzc" event={"ID":"4fe5bb8b-b990-4851-8a1a-3567eb55ae58","Type":"ContainerDied","Data":"fd9cdc12ec0e96b7e8241a7e54e4976c807347c506941a8fc019f49646253359"} Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.108459 4774 scope.go:117] "RemoveContainer" containerID="b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917" Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.133411 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2mhzc"] Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.142878 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2mhzc"] Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.144043 4774 scope.go:117] "RemoveContainer" containerID="4ce9858729ba098a81513cb4fe2b80f6bb83a8a6dd99e85cd465320d3845e92d" Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.177521 4774 scope.go:117] "RemoveContainer" containerID="95427184b804b4f387d4962bfbc07c796fe665b044cc449be5a213f607220940" Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.231149 4774 scope.go:117] "RemoveContainer" containerID="b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917" Nov 21 16:32:38 crc kubenswrapper[4774]: E1121 16:32:38.231694 4774 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917\": container with ID starting with b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917 not found: ID does not exist" containerID="b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917" Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.231739 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917"} err="failed to get container status \"b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917\": rpc error: code = NotFound desc = could not find container \"b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917\": container with ID starting with b2d47c83efcf0951d5e31b943657d412ef22cc15ac27c0c047f463c7d115c917 not found: ID does not exist" Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.231768 4774 scope.go:117] "RemoveContainer" containerID="4ce9858729ba098a81513cb4fe2b80f6bb83a8a6dd99e85cd465320d3845e92d" Nov 21 16:32:38 crc kubenswrapper[4774]: E1121 16:32:38.232017 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ce9858729ba098a81513cb4fe2b80f6bb83a8a6dd99e85cd465320d3845e92d\": container with ID starting with 4ce9858729ba098a81513cb4fe2b80f6bb83a8a6dd99e85cd465320d3845e92d not found: ID does not exist" containerID="4ce9858729ba098a81513cb4fe2b80f6bb83a8a6dd99e85cd465320d3845e92d" Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.232049 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ce9858729ba098a81513cb4fe2b80f6bb83a8a6dd99e85cd465320d3845e92d"} err="failed to get container status \"4ce9858729ba098a81513cb4fe2b80f6bb83a8a6dd99e85cd465320d3845e92d\": rpc error: code = NotFound desc = could not find container \"4ce9858729ba098a81513cb4fe2b80f6bb83a8a6dd99e85cd465320d3845e92d\": container with ID starting with 4ce9858729ba098a81513cb4fe2b80f6bb83a8a6dd99e85cd465320d3845e92d not found: ID does not exist" Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.232067 4774 scope.go:117] "RemoveContainer" containerID="95427184b804b4f387d4962bfbc07c796fe665b044cc449be5a213f607220940" Nov 21 16:32:38 crc kubenswrapper[4774]: E1121 16:32:38.232295 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95427184b804b4f387d4962bfbc07c796fe665b044cc449be5a213f607220940\": container with ID starting with 95427184b804b4f387d4962bfbc07c796fe665b044cc449be5a213f607220940 not found: ID does not exist" containerID="95427184b804b4f387d4962bfbc07c796fe665b044cc449be5a213f607220940" Nov 21 16:32:38 crc kubenswrapper[4774]: I1121 16:32:38.232321 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95427184b804b4f387d4962bfbc07c796fe665b044cc449be5a213f607220940"} err="failed to get container status \"95427184b804b4f387d4962bfbc07c796fe665b044cc449be5a213f607220940\": rpc error: code = NotFound desc = could not find container \"95427184b804b4f387d4962bfbc07c796fe665b044cc449be5a213f607220940\": container with ID starting with 95427184b804b4f387d4962bfbc07c796fe665b044cc449be5a213f607220940 not found: ID does not exist" Nov 21 16:32:40 crc kubenswrapper[4774]: I1121 16:32:40.119317 4774 scope.go:117] "RemoveContainer" 
containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848" Nov 21 16:32:40 crc kubenswrapper[4774]: E1121 16:32:40.120363 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:32:40 crc kubenswrapper[4774]: I1121 16:32:40.120377 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" path="/var/lib/kubelet/pods/4fe5bb8b-b990-4851-8a1a-3567eb55ae58/volumes" Nov 21 16:32:53 crc kubenswrapper[4774]: I1121 16:32:53.093035 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848" Nov 21 16:32:53 crc kubenswrapper[4774]: E1121 16:32:53.093745 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:33:07 crc kubenswrapper[4774]: I1121 16:33:07.093330 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848" Nov 21 16:33:07 crc kubenswrapper[4774]: I1121 16:33:07.417902 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"d0d0834ceda657ad63e78a2aa689d7cc74d9dfb1a14f0a81d702525608dd8b6b"} Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.313076 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5q98n"] Nov 21 16:33:40 crc kubenswrapper[4774]: E1121 16:33:40.314001 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3467477-e5f5-45ef-9910-a109cf5f73b7" containerName="extract-content" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.314016 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3467477-e5f5-45ef-9910-a109cf5f73b7" containerName="extract-content" Nov 21 16:33:40 crc kubenswrapper[4774]: E1121 16:33:40.314059 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3467477-e5f5-45ef-9910-a109cf5f73b7" containerName="extract-utilities" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.314068 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3467477-e5f5-45ef-9910-a109cf5f73b7" containerName="extract-utilities" Nov 21 16:33:40 crc kubenswrapper[4774]: E1121 16:33:40.314495 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerName="registry-server" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.314524 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerName="registry-server" Nov 21 16:33:40 crc kubenswrapper[4774]: E1121 16:33:40.314544 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3467477-e5f5-45ef-9910-a109cf5f73b7" 
containerName="registry-server" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.314571 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3467477-e5f5-45ef-9910-a109cf5f73b7" containerName="registry-server" Nov 21 16:33:40 crc kubenswrapper[4774]: E1121 16:33:40.314583 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerName="extract-utilities" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.314591 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerName="extract-utilities" Nov 21 16:33:40 crc kubenswrapper[4774]: E1121 16:33:40.314620 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerName="extract-content" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.314628 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerName="extract-content" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.314947 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3467477-e5f5-45ef-9910-a109cf5f73b7" containerName="registry-server" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.314966 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fe5bb8b-b990-4851-8a1a-3567eb55ae58" containerName="registry-server" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.317036 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.330685 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5q98n"] Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.495285 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wjp2\" (UniqueName: \"kubernetes.io/projected/5cb0c6c5-4f6f-40b5-af63-767a7a033454-kube-api-access-7wjp2\") pod \"certified-operators-5q98n\" (UID: \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\") " pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.495498 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb0c6c5-4f6f-40b5-af63-767a7a033454-catalog-content\") pod \"certified-operators-5q98n\" (UID: \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\") " pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.495541 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb0c6c5-4f6f-40b5-af63-767a7a033454-utilities\") pod \"certified-operators-5q98n\" (UID: \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\") " pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.597706 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb0c6c5-4f6f-40b5-af63-767a7a033454-catalog-content\") pod \"certified-operators-5q98n\" (UID: \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\") " pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.597783 4774 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb0c6c5-4f6f-40b5-af63-767a7a033454-utilities\") pod \"certified-operators-5q98n\" (UID: \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\") " pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.597908 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wjp2\" (UniqueName: \"kubernetes.io/projected/5cb0c6c5-4f6f-40b5-af63-767a7a033454-kube-api-access-7wjp2\") pod \"certified-operators-5q98n\" (UID: \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\") " pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.598671 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb0c6c5-4f6f-40b5-af63-767a7a033454-catalog-content\") pod \"certified-operators-5q98n\" (UID: \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\") " pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.598702 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb0c6c5-4f6f-40b5-af63-767a7a033454-utilities\") pod \"certified-operators-5q98n\" (UID: \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\") " pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.622334 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wjp2\" (UniqueName: \"kubernetes.io/projected/5cb0c6c5-4f6f-40b5-af63-767a7a033454-kube-api-access-7wjp2\") pod \"certified-operators-5q98n\" (UID: \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\") " pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:40 crc kubenswrapper[4774]: I1121 16:33:40.643259 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:41 crc kubenswrapper[4774]: I1121 16:33:41.170787 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5q98n"] Nov 21 16:33:41 crc kubenswrapper[4774]: I1121 16:33:41.771465 4774 generic.go:334] "Generic (PLEG): container finished" podID="5cb0c6c5-4f6f-40b5-af63-767a7a033454" containerID="7fa396edfcc6c9ab307772c818d29f945851cb7e3da8bf94a7eab8103c738e08" exitCode=0 Nov 21 16:33:41 crc kubenswrapper[4774]: I1121 16:33:41.771536 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5q98n" event={"ID":"5cb0c6c5-4f6f-40b5-af63-767a7a033454","Type":"ContainerDied","Data":"7fa396edfcc6c9ab307772c818d29f945851cb7e3da8bf94a7eab8103c738e08"} Nov 21 16:33:41 crc kubenswrapper[4774]: I1121 16:33:41.771794 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5q98n" event={"ID":"5cb0c6c5-4f6f-40b5-af63-767a7a033454","Type":"ContainerStarted","Data":"a14b705fd2b5e2b21913c50225d71ac9ffeea9792c202d2fe6782fc012c00dd6"} Nov 21 16:33:41 crc kubenswrapper[4774]: I1121 16:33:41.774434 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 16:33:42 crc kubenswrapper[4774]: I1121 16:33:42.798735 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5q98n" event={"ID":"5cb0c6c5-4f6f-40b5-af63-767a7a033454","Type":"ContainerStarted","Data":"64f47a8a7a62788afb8a7e883f263e5b26639794cb2b7836d9667904d43565e1"} Nov 21 16:33:43 crc kubenswrapper[4774]: I1121 16:33:43.813184 4774 generic.go:334] "Generic (PLEG): container finished" podID="5cb0c6c5-4f6f-40b5-af63-767a7a033454" containerID="64f47a8a7a62788afb8a7e883f263e5b26639794cb2b7836d9667904d43565e1" exitCode=0 Nov 21 16:33:43 crc kubenswrapper[4774]: I1121 16:33:43.813287 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5q98n" event={"ID":"5cb0c6c5-4f6f-40b5-af63-767a7a033454","Type":"ContainerDied","Data":"64f47a8a7a62788afb8a7e883f263e5b26639794cb2b7836d9667904d43565e1"} Nov 21 16:33:44 crc kubenswrapper[4774]: I1121 16:33:44.825854 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5q98n" event={"ID":"5cb0c6c5-4f6f-40b5-af63-767a7a033454","Type":"ContainerStarted","Data":"edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69"} Nov 21 16:33:44 crc kubenswrapper[4774]: I1121 16:33:44.851552 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5q98n" podStartSLOduration=2.398999554 podStartE2EDuration="4.851535704s" podCreationTimestamp="2025-11-21 16:33:40 +0000 UTC" firstStartedPulling="2025-11-21 16:33:41.774211096 +0000 UTC m=+9012.426410355" lastFinishedPulling="2025-11-21 16:33:44.226747256 +0000 UTC m=+9014.878946505" observedRunningTime="2025-11-21 16:33:44.846114859 +0000 UTC m=+9015.498314158" watchObservedRunningTime="2025-11-21 16:33:44.851535704 +0000 UTC m=+9015.503734963" Nov 21 16:33:50 crc kubenswrapper[4774]: I1121 16:33:50.643608 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:50 crc kubenswrapper[4774]: I1121 16:33:50.644205 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:50 crc kubenswrapper[4774]: I1121 16:33:50.702113 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:50 crc kubenswrapper[4774]: I1121 16:33:50.928042 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:50 crc kubenswrapper[4774]: I1121 16:33:50.983574 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5q98n"] Nov 21 16:33:52 crc kubenswrapper[4774]: I1121 16:33:52.901078 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5q98n" podUID="5cb0c6c5-4f6f-40b5-af63-767a7a033454" containerName="registry-server" containerID="cri-o://edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69" gracePeriod=2 Nov 21 16:33:53 crc kubenswrapper[4774]: I1121 16:33:53.915666 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:53 crc kubenswrapper[4774]: I1121 16:33:53.916906 4774 generic.go:334] "Generic (PLEG): container finished" podID="5cb0c6c5-4f6f-40b5-af63-767a7a033454" containerID="edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69" exitCode=0 Nov 21 16:33:53 crc kubenswrapper[4774]: I1121 16:33:53.916957 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5q98n" event={"ID":"5cb0c6c5-4f6f-40b5-af63-767a7a033454","Type":"ContainerDied","Data":"edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69"} Nov 21 16:33:53 crc kubenswrapper[4774]: I1121 16:33:53.916989 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5q98n" event={"ID":"5cb0c6c5-4f6f-40b5-af63-767a7a033454","Type":"ContainerDied","Data":"a14b705fd2b5e2b21913c50225d71ac9ffeea9792c202d2fe6782fc012c00dd6"} Nov 21 16:33:53 crc kubenswrapper[4774]: I1121 16:33:53.917008 4774 scope.go:117] "RemoveContainer" containerID="edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69" Nov 21 16:33:53 crc kubenswrapper[4774]: I1121 16:33:53.946161 4774 scope.go:117] "RemoveContainer" containerID="64f47a8a7a62788afb8a7e883f263e5b26639794cb2b7836d9667904d43565e1" Nov 21 16:33:53 crc kubenswrapper[4774]: I1121 16:33:53.970125 4774 scope.go:117] "RemoveContainer" containerID="7fa396edfcc6c9ab307772c818d29f945851cb7e3da8bf94a7eab8103c738e08" Nov 21 16:33:53 crc kubenswrapper[4774]: I1121 16:33:53.992483 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb0c6c5-4f6f-40b5-af63-767a7a033454-utilities\") pod \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\" (UID: \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\") " Nov 21 16:33:53 crc kubenswrapper[4774]: I1121 16:33:53.992629 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb0c6c5-4f6f-40b5-af63-767a7a033454-catalog-content\") pod \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\" (UID: \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\") " Nov 21 16:33:53 crc kubenswrapper[4774]: I1121 16:33:53.992870 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wjp2\" (UniqueName: 
\"kubernetes.io/projected/5cb0c6c5-4f6f-40b5-af63-767a7a033454-kube-api-access-7wjp2\") pod \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\" (UID: \"5cb0c6c5-4f6f-40b5-af63-767a7a033454\") " Nov 21 16:33:53 crc kubenswrapper[4774]: I1121 16:33:53.993747 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cb0c6c5-4f6f-40b5-af63-767a7a033454-utilities" (OuterVolumeSpecName: "utilities") pod "5cb0c6c5-4f6f-40b5-af63-767a7a033454" (UID: "5cb0c6c5-4f6f-40b5-af63-767a7a033454"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:33:53 crc kubenswrapper[4774]: I1121 16:33:53.998834 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cb0c6c5-4f6f-40b5-af63-767a7a033454-kube-api-access-7wjp2" (OuterVolumeSpecName: "kube-api-access-7wjp2") pod "5cb0c6c5-4f6f-40b5-af63-767a7a033454" (UID: "5cb0c6c5-4f6f-40b5-af63-767a7a033454"). InnerVolumeSpecName "kube-api-access-7wjp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.036409 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cb0c6c5-4f6f-40b5-af63-767a7a033454-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5cb0c6c5-4f6f-40b5-af63-767a7a033454" (UID: "5cb0c6c5-4f6f-40b5-af63-767a7a033454"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.080578 4774 scope.go:117] "RemoveContainer" containerID="edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69" Nov 21 16:33:54 crc kubenswrapper[4774]: E1121 16:33:54.081144 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69\": container with ID starting with edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69 not found: ID does not exist" containerID="edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69" Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.081178 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69"} err="failed to get container status \"edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69\": rpc error: code = NotFound desc = could not find container \"edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69\": container with ID starting with edf23ba9cc13c3903affdfbedb1cfc61bf223a4c3a829890d09061c447074a69 not found: ID does not exist" Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.081203 4774 scope.go:117] "RemoveContainer" containerID="64f47a8a7a62788afb8a7e883f263e5b26639794cb2b7836d9667904d43565e1" Nov 21 16:33:54 crc kubenswrapper[4774]: E1121 16:33:54.081887 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64f47a8a7a62788afb8a7e883f263e5b26639794cb2b7836d9667904d43565e1\": container with ID starting with 64f47a8a7a62788afb8a7e883f263e5b26639794cb2b7836d9667904d43565e1 not found: ID does not exist" containerID="64f47a8a7a62788afb8a7e883f263e5b26639794cb2b7836d9667904d43565e1" Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.081947 4774 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"64f47a8a7a62788afb8a7e883f263e5b26639794cb2b7836d9667904d43565e1"} err="failed to get container status \"64f47a8a7a62788afb8a7e883f263e5b26639794cb2b7836d9667904d43565e1\": rpc error: code = NotFound desc = could not find container \"64f47a8a7a62788afb8a7e883f263e5b26639794cb2b7836d9667904d43565e1\": container with ID starting with 64f47a8a7a62788afb8a7e883f263e5b26639794cb2b7836d9667904d43565e1 not found: ID does not exist" Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.081976 4774 scope.go:117] "RemoveContainer" containerID="7fa396edfcc6c9ab307772c818d29f945851cb7e3da8bf94a7eab8103c738e08" Nov 21 16:33:54 crc kubenswrapper[4774]: E1121 16:33:54.082473 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fa396edfcc6c9ab307772c818d29f945851cb7e3da8bf94a7eab8103c738e08\": container with ID starting with 7fa396edfcc6c9ab307772c818d29f945851cb7e3da8bf94a7eab8103c738e08 not found: ID does not exist" containerID="7fa396edfcc6c9ab307772c818d29f945851cb7e3da8bf94a7eab8103c738e08" Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.082501 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fa396edfcc6c9ab307772c818d29f945851cb7e3da8bf94a7eab8103c738e08"} err="failed to get container status \"7fa396edfcc6c9ab307772c818d29f945851cb7e3da8bf94a7eab8103c738e08\": rpc error: code = NotFound desc = could not find container \"7fa396edfcc6c9ab307772c818d29f945851cb7e3da8bf94a7eab8103c738e08\": container with ID starting with 7fa396edfcc6c9ab307772c818d29f945851cb7e3da8bf94a7eab8103c738e08 not found: ID does not exist" Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.095029 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wjp2\" (UniqueName: \"kubernetes.io/projected/5cb0c6c5-4f6f-40b5-af63-767a7a033454-kube-api-access-7wjp2\") on node \"crc\" DevicePath \"\"" Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.095068 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cb0c6c5-4f6f-40b5-af63-767a7a033454-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.095078 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cb0c6c5-4f6f-40b5-af63-767a7a033454-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.926894 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5q98n" Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.968898 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5q98n"] Nov 21 16:33:54 crc kubenswrapper[4774]: I1121 16:33:54.979915 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5q98n"] Nov 21 16:33:56 crc kubenswrapper[4774]: I1121 16:33:56.107293 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cb0c6c5-4f6f-40b5-af63-767a7a033454" path="/var/lib/kubelet/pods/5cb0c6c5-4f6f-40b5-af63-767a7a033454/volumes" Nov 21 16:35:29 crc kubenswrapper[4774]: I1121 16:35:29.601526 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:35:29 crc kubenswrapper[4774]: I1121 16:35:29.602204 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:35:59 crc kubenswrapper[4774]: I1121 16:35:59.601169 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:35:59 crc kubenswrapper[4774]: I1121 16:35:59.601870 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:36:29 crc kubenswrapper[4774]: I1121 16:36:29.601150 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:36:29 crc kubenswrapper[4774]: I1121 16:36:29.601667 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:36:29 crc kubenswrapper[4774]: I1121 16:36:29.601717 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 16:36:29 crc kubenswrapper[4774]: I1121 16:36:29.602587 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d0d0834ceda657ad63e78a2aa689d7cc74d9dfb1a14f0a81d702525608dd8b6b"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be 
restarted" Nov 21 16:36:29 crc kubenswrapper[4774]: I1121 16:36:29.602641 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://d0d0834ceda657ad63e78a2aa689d7cc74d9dfb1a14f0a81d702525608dd8b6b" gracePeriod=600 Nov 21 16:36:30 crc kubenswrapper[4774]: I1121 16:36:30.640812 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="d0d0834ceda657ad63e78a2aa689d7cc74d9dfb1a14f0a81d702525608dd8b6b" exitCode=0 Nov 21 16:36:30 crc kubenswrapper[4774]: I1121 16:36:30.640849 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"d0d0834ceda657ad63e78a2aa689d7cc74d9dfb1a14f0a81d702525608dd8b6b"} Nov 21 16:36:30 crc kubenswrapper[4774]: I1121 16:36:30.641415 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28"} Nov 21 16:36:30 crc kubenswrapper[4774]: I1121 16:36:30.641435 4774 scope.go:117] "RemoveContainer" containerID="9cca9222fcc80b9d832690027a4a6216e49ac83afccd1aefcd90e048e1c3d848" Nov 21 16:36:54 crc kubenswrapper[4774]: I1121 16:36:54.916756 4774 generic.go:334] "Generic (PLEG): container finished" podID="56441432-7ab5-47a5-85fc-d584e665625e" containerID="1aee94be8b78b49b8b277a08612df38d781ec48198585a49d0633032c583a075" exitCode=0 Nov 21 16:36:54 crc kubenswrapper[4774]: I1121 16:36:54.916831 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq" event={"ID":"56441432-7ab5-47a5-85fc-d584e665625e","Type":"ContainerDied","Data":"1aee94be8b78b49b8b277a08612df38d781ec48198585a49d0633032c583a075"} Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.352016 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.424771 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-inventory\") pod \"56441432-7ab5-47a5-85fc-d584e665625e\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.424897 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-ceph\") pod \"56441432-7ab5-47a5-85fc-d584e665625e\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.425099 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzr6q\" (UniqueName: \"kubernetes.io/projected/56441432-7ab5-47a5-85fc-d584e665625e-kube-api-access-wzr6q\") pod \"56441432-7ab5-47a5-85fc-d584e665625e\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.425198 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-neutron-dhcp-agent-neutron-config-0\") pod \"56441432-7ab5-47a5-85fc-d584e665625e\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.425302 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-neutron-dhcp-combined-ca-bundle\") pod \"56441432-7ab5-47a5-85fc-d584e665625e\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.425365 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-ssh-key\") pod \"56441432-7ab5-47a5-85fc-d584e665625e\" (UID: \"56441432-7ab5-47a5-85fc-d584e665625e\") " Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.431299 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-ceph" (OuterVolumeSpecName: "ceph") pod "56441432-7ab5-47a5-85fc-d584e665625e" (UID: "56441432-7ab5-47a5-85fc-d584e665625e"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.431563 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56441432-7ab5-47a5-85fc-d584e665625e-kube-api-access-wzr6q" (OuterVolumeSpecName: "kube-api-access-wzr6q") pod "56441432-7ab5-47a5-85fc-d584e665625e" (UID: "56441432-7ab5-47a5-85fc-d584e665625e"). InnerVolumeSpecName "kube-api-access-wzr6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.438630 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "56441432-7ab5-47a5-85fc-d584e665625e" (UID: "56441432-7ab5-47a5-85fc-d584e665625e"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.460413 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-inventory" (OuterVolumeSpecName: "inventory") pod "56441432-7ab5-47a5-85fc-d584e665625e" (UID: "56441432-7ab5-47a5-85fc-d584e665625e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.468371 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "56441432-7ab5-47a5-85fc-d584e665625e" (UID: "56441432-7ab5-47a5-85fc-d584e665625e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.480023 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "56441432-7ab5-47a5-85fc-d584e665625e" (UID: "56441432-7ab5-47a5-85fc-d584e665625e"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.528304 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzr6q\" (UniqueName: \"kubernetes.io/projected/56441432-7ab5-47a5-85fc-d584e665625e-kube-api-access-wzr6q\") on node \"crc\" DevicePath \"\"" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.528497 4774 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.528682 4774 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.528767 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.528860 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.528936 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/56441432-7ab5-47a5-85fc-d584e665625e-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.939904 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq" event={"ID":"56441432-7ab5-47a5-85fc-d584e665625e","Type":"ContainerDied","Data":"e661ed6e339b1db911e0c745345fa574f9c83509ddf5274494c865220afa427f"} Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.940253 4774 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="e661ed6e339b1db911e0c745345fa574f9c83509ddf5274494c865220afa427f" Nov 21 16:36:56 crc kubenswrapper[4774]: I1121 16:36:56.939968 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-dhcp-openstack-openstack-cell1-8l9vq" Nov 21 16:37:04 crc kubenswrapper[4774]: I1121 16:37:04.879702 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 16:37:04 crc kubenswrapper[4774]: I1121 16:37:04.880281 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="fce1c2fe-593d-489c-a4ee-79b2be128d8b" containerName="nova-cell0-conductor-conductor" containerID="cri-o://065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00" gracePeriod=30 Nov 21 16:37:04 crc kubenswrapper[4774]: I1121 16:37:04.900786 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 16:37:04 crc kubenswrapper[4774]: I1121 16:37:04.901011 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-conductor-0" podUID="7b354117-2f9a-4c3a-b3fb-d8f0a61e8784" containerName="nova-cell1-conductor-conductor" containerID="cri-o://1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10" gracePeriod=30 Nov 21 16:37:05 crc kubenswrapper[4774]: I1121 16:37:05.946909 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 16:37:05 crc kubenswrapper[4774]: I1121 16:37:05.947250 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="19e323d5-4bb6-4769-aab0-fe396014cc08" containerName="nova-scheduler-scheduler" containerID="cri-o://080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba" gracePeriod=30 Nov 21 16:37:05 crc kubenswrapper[4774]: I1121 16:37:05.967008 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 16:37:05 crc kubenswrapper[4774]: I1121 16:37:05.967432 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" containerName="nova-api-api" containerID="cri-o://47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0" gracePeriod=30 Nov 21 16:37:05 crc kubenswrapper[4774]: I1121 16:37:05.967271 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" containerName="nova-api-log" containerID="cri-o://68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e" gracePeriod=30 Nov 21 16:37:05 crc kubenswrapper[4774]: I1121 16:37:05.977430 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 16:37:05 crc kubenswrapper[4774]: I1121 16:37:05.977910 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" containerName="nova-metadata-log" containerID="cri-o://9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af" gracePeriod=30 Nov 21 16:37:05 crc kubenswrapper[4774]: I1121 16:37:05.978106 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" containerName="nova-metadata-metadata" containerID="cri-o://5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a" gracePeriod=30 Nov 21 
16:37:06 crc kubenswrapper[4774]: E1121 16:37:06.017860 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 16:37:06 crc kubenswrapper[4774]: E1121 16:37:06.022809 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 16:37:06 crc kubenswrapper[4774]: E1121 16:37:06.024158 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 16:37:06 crc kubenswrapper[4774]: E1121 16:37:06.024305 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="19e323d5-4bb6-4769-aab0-fe396014cc08" containerName="nova-scheduler-scheduler" Nov 21 16:37:07 crc kubenswrapper[4774]: E1121 16:37:07.005590 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 21 16:37:07 crc kubenswrapper[4774]: E1121 16:37:07.008489 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 21 16:37:07 crc kubenswrapper[4774]: E1121 16:37:07.011473 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Nov 21 16:37:07 crc kubenswrapper[4774]: E1121 16:37:07.011624 4774 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell1-conductor-0" podUID="7b354117-2f9a-4c3a-b3fb-d8f0a61e8784" containerName="nova-cell1-conductor-conductor" Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.047465 4774 generic.go:334] "Generic (PLEG): container finished" podID="5978983d-ed01-4414-a4ac-bd04b249957b" containerID="9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af" exitCode=143 Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.047558 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"5978983d-ed01-4414-a4ac-bd04b249957b","Type":"ContainerDied","Data":"9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af"} Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.050975 4774 generic.go:334] "Generic (PLEG): container finished" podID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" containerID="68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e" exitCode=143 Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.051064 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"21fde3a8-382a-42fe-863a-2c02cb7ccc90","Type":"ContainerDied","Data":"68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e"} Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.574265 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.668363 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-combined-ca-bundle\") pod \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\" (UID: \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\") " Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.668468 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qp7c\" (UniqueName: \"kubernetes.io/projected/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-kube-api-access-2qp7c\") pod \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\" (UID: \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\") " Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.668497 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-config-data\") pod \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\" (UID: \"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784\") " Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.674720 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-kube-api-access-2qp7c" (OuterVolumeSpecName: "kube-api-access-2qp7c") pod "7b354117-2f9a-4c3a-b3fb-d8f0a61e8784" (UID: "7b354117-2f9a-4c3a-b3fb-d8f0a61e8784"). InnerVolumeSpecName "kube-api-access-2qp7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.700036 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-config-data" (OuterVolumeSpecName: "config-data") pod "7b354117-2f9a-4c3a-b3fb-d8f0a61e8784" (UID: "7b354117-2f9a-4c3a-b3fb-d8f0a61e8784"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.708935 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b354117-2f9a-4c3a-b3fb-d8f0a61e8784" (UID: "7b354117-2f9a-4c3a-b3fb-d8f0a61e8784"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.770953 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.770985 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qp7c\" (UniqueName: \"kubernetes.io/projected/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-kube-api-access-2qp7c\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:07 crc kubenswrapper[4774]: I1121 16:37:07.770996 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.063301 4774 generic.go:334] "Generic (PLEG): container finished" podID="7b354117-2f9a-4c3a-b3fb-d8f0a61e8784" containerID="1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10" exitCode=0 Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.063343 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784","Type":"ContainerDied","Data":"1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10"} Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.063380 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.063397 4774 scope.go:117] "RemoveContainer" containerID="1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.063381 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"7b354117-2f9a-4c3a-b3fb-d8f0a61e8784","Type":"ContainerDied","Data":"a42817418a58f63cf62a15b84fa4cd528b6c4a625d62147d44d8ee4e99a064cc"} Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.096623 4774 scope.go:117] "RemoveContainer" containerID="1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10" Nov 21 16:37:08 crc kubenswrapper[4774]: E1121 16:37:08.097120 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10\": container with ID starting with 1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10 not found: ID does not exist" containerID="1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.097159 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10"} err="failed to get container status \"1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10\": rpc error: code = NotFound desc = could not find container \"1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10\": container with ID starting with 1c69fec66e47970373ff7c1b3c88e993930bff22009667067c63b6b25754af10 not found: ID does not exist" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.107219 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 
16:37:08.108730 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.128668 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 16:37:08 crc kubenswrapper[4774]: E1121 16:37:08.129301 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb0c6c5-4f6f-40b5-af63-767a7a033454" containerName="extract-utilities" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.129326 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb0c6c5-4f6f-40b5-af63-767a7a033454" containerName="extract-utilities" Nov 21 16:37:08 crc kubenswrapper[4774]: E1121 16:37:08.129347 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb0c6c5-4f6f-40b5-af63-767a7a033454" containerName="extract-content" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.129355 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb0c6c5-4f6f-40b5-af63-767a7a033454" containerName="extract-content" Nov 21 16:37:08 crc kubenswrapper[4774]: E1121 16:37:08.129375 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56441432-7ab5-47a5-85fc-d584e665625e" containerName="neutron-dhcp-openstack-openstack-cell1" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.129382 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="56441432-7ab5-47a5-85fc-d584e665625e" containerName="neutron-dhcp-openstack-openstack-cell1" Nov 21 16:37:08 crc kubenswrapper[4774]: E1121 16:37:08.129403 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb0c6c5-4f6f-40b5-af63-767a7a033454" containerName="registry-server" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.129411 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb0c6c5-4f6f-40b5-af63-767a7a033454" containerName="registry-server" Nov 21 16:37:08 crc kubenswrapper[4774]: E1121 16:37:08.129429 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b354117-2f9a-4c3a-b3fb-d8f0a61e8784" containerName="nova-cell1-conductor-conductor" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.129436 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b354117-2f9a-4c3a-b3fb-d8f0a61e8784" containerName="nova-cell1-conductor-conductor" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.129723 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="56441432-7ab5-47a5-85fc-d584e665625e" containerName="neutron-dhcp-openstack-openstack-cell1" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.129741 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b354117-2f9a-4c3a-b3fb-d8f0a61e8784" containerName="nova-cell1-conductor-conductor" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.129764 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cb0c6c5-4f6f-40b5-af63-767a7a033454" containerName="registry-server" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.130747 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.132410 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.146519 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.178672 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb049da2-5aaa-427a-bce8-3bb7843aa828-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"eb049da2-5aaa-427a-bce8-3bb7843aa828\") " pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.178763 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb049da2-5aaa-427a-bce8-3bb7843aa828-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"eb049da2-5aaa-427a-bce8-3bb7843aa828\") " pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.178979 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpzff\" (UniqueName: \"kubernetes.io/projected/eb049da2-5aaa-427a-bce8-3bb7843aa828-kube-api-access-wpzff\") pod \"nova-cell1-conductor-0\" (UID: \"eb049da2-5aaa-427a-bce8-3bb7843aa828\") " pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.280421 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb049da2-5aaa-427a-bce8-3bb7843aa828-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"eb049da2-5aaa-427a-bce8-3bb7843aa828\") " pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.280479 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb049da2-5aaa-427a-bce8-3bb7843aa828-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"eb049da2-5aaa-427a-bce8-3bb7843aa828\") " pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.280538 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpzff\" (UniqueName: \"kubernetes.io/projected/eb049da2-5aaa-427a-bce8-3bb7843aa828-kube-api-access-wpzff\") pod \"nova-cell1-conductor-0\" (UID: \"eb049da2-5aaa-427a-bce8-3bb7843aa828\") " pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.287258 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb049da2-5aaa-427a-bce8-3bb7843aa828-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"eb049da2-5aaa-427a-bce8-3bb7843aa828\") " pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.287291 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb049da2-5aaa-427a-bce8-3bb7843aa828-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"eb049da2-5aaa-427a-bce8-3bb7843aa828\") " pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.297881 4774 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpzff\" (UniqueName: \"kubernetes.io/projected/eb049da2-5aaa-427a-bce8-3bb7843aa828-kube-api-access-wpzff\") pod \"nova-cell1-conductor-0\" (UID: \"eb049da2-5aaa-427a-bce8-3bb7843aa828\") " pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.459887 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.646915 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.790558 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nxw9\" (UniqueName: \"kubernetes.io/projected/fce1c2fe-593d-489c-a4ee-79b2be128d8b-kube-api-access-4nxw9\") pod \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\" (UID: \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\") " Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.790967 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce1c2fe-593d-489c-a4ee-79b2be128d8b-combined-ca-bundle\") pod \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\" (UID: \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\") " Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.791040 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fce1c2fe-593d-489c-a4ee-79b2be128d8b-config-data\") pod \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\" (UID: \"fce1c2fe-593d-489c-a4ee-79b2be128d8b\") " Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.807895 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fce1c2fe-593d-489c-a4ee-79b2be128d8b-kube-api-access-4nxw9" (OuterVolumeSpecName: "kube-api-access-4nxw9") pod "fce1c2fe-593d-489c-a4ee-79b2be128d8b" (UID: "fce1c2fe-593d-489c-a4ee-79b2be128d8b"). InnerVolumeSpecName "kube-api-access-4nxw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.835193 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fce1c2fe-593d-489c-a4ee-79b2be128d8b-config-data" (OuterVolumeSpecName: "config-data") pod "fce1c2fe-593d-489c-a4ee-79b2be128d8b" (UID: "fce1c2fe-593d-489c-a4ee-79b2be128d8b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.837034 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fce1c2fe-593d-489c-a4ee-79b2be128d8b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fce1c2fe-593d-489c-a4ee-79b2be128d8b" (UID: "fce1c2fe-593d-489c-a4ee-79b2be128d8b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.894226 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nxw9\" (UniqueName: \"kubernetes.io/projected/fce1c2fe-593d-489c-a4ee-79b2be128d8b-kube-api-access-4nxw9\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.894268 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce1c2fe-593d-489c-a4ee-79b2be128d8b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.894283 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fce1c2fe-593d-489c-a4ee-79b2be128d8b-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:08 crc kubenswrapper[4774]: I1121 16:37:08.945573 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.076711 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"eb049da2-5aaa-427a-bce8-3bb7843aa828","Type":"ContainerStarted","Data":"c1a5f05663fd753eb257dafdf24e03c7e381adc0f59da7d0f7baed4eed9ba0ff"} Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.079539 4774 generic.go:334] "Generic (PLEG): container finished" podID="fce1c2fe-593d-489c-a4ee-79b2be128d8b" containerID="065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00" exitCode=0 Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.079571 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fce1c2fe-593d-489c-a4ee-79b2be128d8b","Type":"ContainerDied","Data":"065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00"} Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.079595 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fce1c2fe-593d-489c-a4ee-79b2be128d8b","Type":"ContainerDied","Data":"6cfdad9a8631811badec4d377f5becae5ddcefba9d57ff031efd18d06bebe541"} Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.079647 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.079647 4774 scope.go:117] "RemoveContainer" containerID="065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.115371 4774 scope.go:117] "RemoveContainer" containerID="065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00" Nov 21 16:37:09 crc kubenswrapper[4774]: E1121 16:37:09.116182 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00\": container with ID starting with 065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00 not found: ID does not exist" containerID="065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.116274 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00"} err="failed to get container status \"065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00\": rpc error: code = NotFound desc = could not find container \"065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00\": container with ID starting with 065f8b4f43cc625c8dee5f04d6d260b752e25c303879d9b01652e71d488c7f00 not found: ID does not exist" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.118138 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.127676 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.128701 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.83:8775/\": read tcp 10.217.0.2:40718->10.217.1.83:8775: read: connection reset by peer" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.128726 4774 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.83:8775/\": read tcp 10.217.0.2:40714->10.217.1.83:8775: read: connection reset by peer" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.141585 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 16:37:09 crc kubenswrapper[4774]: E1121 16:37:09.142048 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fce1c2fe-593d-489c-a4ee-79b2be128d8b" containerName="nova-cell0-conductor-conductor" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.142067 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="fce1c2fe-593d-489c-a4ee-79b2be128d8b" containerName="nova-cell0-conductor-conductor" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.142281 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="fce1c2fe-593d-489c-a4ee-79b2be128d8b" containerName="nova-cell0-conductor-conductor" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.143275 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.147308 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.168871 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.207067 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9thq\" (UniqueName: \"kubernetes.io/projected/f141d1f0-7f09-41e7-a3f1-b921ebcf68e0-kube-api-access-r9thq\") pod \"nova-cell0-conductor-0\" (UID: \"f141d1f0-7f09-41e7-a3f1-b921ebcf68e0\") " pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.207157 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f141d1f0-7f09-41e7-a3f1-b921ebcf68e0-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f141d1f0-7f09-41e7-a3f1-b921ebcf68e0\") " pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.207403 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f141d1f0-7f09-41e7-a3f1-b921ebcf68e0-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f141d1f0-7f09-41e7-a3f1-b921ebcf68e0\") " pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.315253 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9thq\" (UniqueName: \"kubernetes.io/projected/f141d1f0-7f09-41e7-a3f1-b921ebcf68e0-kube-api-access-r9thq\") pod \"nova-cell0-conductor-0\" (UID: \"f141d1f0-7f09-41e7-a3f1-b921ebcf68e0\") " pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.315324 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f141d1f0-7f09-41e7-a3f1-b921ebcf68e0-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f141d1f0-7f09-41e7-a3f1-b921ebcf68e0\") " pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.315404 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f141d1f0-7f09-41e7-a3f1-b921ebcf68e0-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f141d1f0-7f09-41e7-a3f1-b921ebcf68e0\") " pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.323108 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f141d1f0-7f09-41e7-a3f1-b921ebcf68e0-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f141d1f0-7f09-41e7-a3f1-b921ebcf68e0\") " pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.323949 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f141d1f0-7f09-41e7-a3f1-b921ebcf68e0-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f141d1f0-7f09-41e7-a3f1-b921ebcf68e0\") " pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.334854 4774 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9thq\" (UniqueName: \"kubernetes.io/projected/f141d1f0-7f09-41e7-a3f1-b921ebcf68e0-kube-api-access-r9thq\") pod \"nova-cell0-conductor-0\" (UID: \"f141d1f0-7f09-41e7-a3f1-b921ebcf68e0\") " pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.560471 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.745576 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.758490 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.827003 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm8q7\" (UniqueName: \"kubernetes.io/projected/21fde3a8-382a-42fe-863a-2c02cb7ccc90-kube-api-access-zm8q7\") pod \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.827149 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21fde3a8-382a-42fe-863a-2c02cb7ccc90-combined-ca-bundle\") pod \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.827210 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72jgh\" (UniqueName: \"kubernetes.io/projected/5978983d-ed01-4414-a4ac-bd04b249957b-kube-api-access-72jgh\") pod \"5978983d-ed01-4414-a4ac-bd04b249957b\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.827278 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5978983d-ed01-4414-a4ac-bd04b249957b-config-data\") pod \"5978983d-ed01-4414-a4ac-bd04b249957b\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.827322 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5978983d-ed01-4414-a4ac-bd04b249957b-logs\") pod \"5978983d-ed01-4414-a4ac-bd04b249957b\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.827336 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21fde3a8-382a-42fe-863a-2c02cb7ccc90-config-data\") pod \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.827366 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21fde3a8-382a-42fe-863a-2c02cb7ccc90-logs\") pod \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\" (UID: \"21fde3a8-382a-42fe-863a-2c02cb7ccc90\") " Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.827446 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5978983d-ed01-4414-a4ac-bd04b249957b-combined-ca-bundle\") pod \"5978983d-ed01-4414-a4ac-bd04b249957b\" (UID: \"5978983d-ed01-4414-a4ac-bd04b249957b\") " Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.827987 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5978983d-ed01-4414-a4ac-bd04b249957b-logs" (OuterVolumeSpecName: "logs") pod "5978983d-ed01-4414-a4ac-bd04b249957b" (UID: "5978983d-ed01-4414-a4ac-bd04b249957b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.832298 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21fde3a8-382a-42fe-863a-2c02cb7ccc90-logs" (OuterVolumeSpecName: "logs") pod "21fde3a8-382a-42fe-863a-2c02cb7ccc90" (UID: "21fde3a8-382a-42fe-863a-2c02cb7ccc90"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.834060 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21fde3a8-382a-42fe-863a-2c02cb7ccc90-kube-api-access-zm8q7" (OuterVolumeSpecName: "kube-api-access-zm8q7") pod "21fde3a8-382a-42fe-863a-2c02cb7ccc90" (UID: "21fde3a8-382a-42fe-863a-2c02cb7ccc90"). InnerVolumeSpecName "kube-api-access-zm8q7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.841931 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5978983d-ed01-4414-a4ac-bd04b249957b-kube-api-access-72jgh" (OuterVolumeSpecName: "kube-api-access-72jgh") pod "5978983d-ed01-4414-a4ac-bd04b249957b" (UID: "5978983d-ed01-4414-a4ac-bd04b249957b"). InnerVolumeSpecName "kube-api-access-72jgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.859715 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5978983d-ed01-4414-a4ac-bd04b249957b-config-data" (OuterVolumeSpecName: "config-data") pod "5978983d-ed01-4414-a4ac-bd04b249957b" (UID: "5978983d-ed01-4414-a4ac-bd04b249957b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.861244 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21fde3a8-382a-42fe-863a-2c02cb7ccc90-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21fde3a8-382a-42fe-863a-2c02cb7ccc90" (UID: "21fde3a8-382a-42fe-863a-2c02cb7ccc90"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.863738 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21fde3a8-382a-42fe-863a-2c02cb7ccc90-config-data" (OuterVolumeSpecName: "config-data") pod "21fde3a8-382a-42fe-863a-2c02cb7ccc90" (UID: "21fde3a8-382a-42fe-863a-2c02cb7ccc90"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.877506 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5978983d-ed01-4414-a4ac-bd04b249957b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5978983d-ed01-4414-a4ac-bd04b249957b" (UID: "5978983d-ed01-4414-a4ac-bd04b249957b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.930017 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72jgh\" (UniqueName: \"kubernetes.io/projected/5978983d-ed01-4414-a4ac-bd04b249957b-kube-api-access-72jgh\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.930567 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5978983d-ed01-4414-a4ac-bd04b249957b-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.930650 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21fde3a8-382a-42fe-863a-2c02cb7ccc90-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.930726 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5978983d-ed01-4414-a4ac-bd04b249957b-logs\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.930804 4774 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21fde3a8-382a-42fe-863a-2c02cb7ccc90-logs\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.930956 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5978983d-ed01-4414-a4ac-bd04b249957b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.931041 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm8q7\" (UniqueName: \"kubernetes.io/projected/21fde3a8-382a-42fe-863a-2c02cb7ccc90-kube-api-access-zm8q7\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:09 crc kubenswrapper[4774]: I1121 16:37:09.931121 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21fde3a8-382a-42fe-863a-2c02cb7ccc90-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.087800 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.095122 4774 generic.go:334] "Generic (PLEG): container finished" podID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" containerID="47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0" exitCode=0 Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.099225 4774 generic.go:334] "Generic (PLEG): container finished" podID="5978983d-ed01-4414-a4ac-bd04b249957b" containerID="5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a" exitCode=0 Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.108849 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.109019 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.148154 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b354117-2f9a-4c3a-b3fb-d8f0a61e8784" path="/var/lib/kubelet/pods/7b354117-2f9a-4c3a-b3fb-d8f0a61e8784/volumes" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.149236 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fce1c2fe-593d-489c-a4ee-79b2be128d8b" path="/var/lib/kubelet/pods/fce1c2fe-593d-489c-a4ee-79b2be128d8b/volumes" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.150754 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"21fde3a8-382a-42fe-863a-2c02cb7ccc90","Type":"ContainerDied","Data":"47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0"} Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.150789 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"21fde3a8-382a-42fe-863a-2c02cb7ccc90","Type":"ContainerDied","Data":"4ad7636096d84c190b3275405d0126fd4a40f4a23a4775c0eb6d7c073c3a1b7d"} Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.150801 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"eb049da2-5aaa-427a-bce8-3bb7843aa828","Type":"ContainerStarted","Data":"efcb1e001b1c40a12fd4744144a69265ea5b3745256eb95db1486004d18dc7f0"} Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.150830 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5978983d-ed01-4414-a4ac-bd04b249957b","Type":"ContainerDied","Data":"5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a"} Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.150844 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"5978983d-ed01-4414-a4ac-bd04b249957b","Type":"ContainerDied","Data":"7791cceee7af484b3d3d1b74ab4e94e05edfe0aba5156fc54188ad7b284cca16"} Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.150854 4774 scope.go:117] "RemoveContainer" containerID="47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.182031 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.182010529 podStartE2EDuration="2.182010529s" podCreationTimestamp="2025-11-21 16:37:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 16:37:10.157754475 +0000 UTC m=+9220.809953754" watchObservedRunningTime="2025-11-21 16:37:10.182010529 +0000 UTC m=+9220.834209788" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.196143 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.210955 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.223093 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.244246 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-metadata-0"] Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.250970 4774 scope.go:117] "RemoveContainer" containerID="68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.258827 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Nov 21 16:37:10 crc kubenswrapper[4774]: E1121 16:37:10.259321 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" containerName="nova-metadata-metadata" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.259335 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" containerName="nova-metadata-metadata" Nov 21 16:37:10 crc kubenswrapper[4774]: E1121 16:37:10.259354 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" containerName="nova-api-api" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.259360 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" containerName="nova-api-api" Nov 21 16:37:10 crc kubenswrapper[4774]: E1121 16:37:10.259386 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" containerName="nova-metadata-log" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.259393 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" containerName="nova-metadata-log" Nov 21 16:37:10 crc kubenswrapper[4774]: E1121 16:37:10.259424 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" containerName="nova-api-log" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.259431 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" containerName="nova-api-log" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.259639 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" containerName="nova-metadata-metadata" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.259663 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" containerName="nova-api-api" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.259678 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" containerName="nova-api-log" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.259693 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" containerName="nova-metadata-log" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.261103 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.263156 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.273174 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.279653 4774 scope.go:117] "RemoveContainer" containerID="47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0" Nov 21 16:37:10 crc kubenswrapper[4774]: E1121 16:37:10.280083 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0\": container with ID starting with 47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0 not found: ID does not exist" containerID="47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.280129 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0"} err="failed to get container status \"47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0\": rpc error: code = NotFound desc = could not find container \"47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0\": container with ID starting with 47791a26f408dfb2dce96f19a6ccbd2c71e6308cb388a4724b5d800fc93daac0 not found: ID does not exist" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.280166 4774 scope.go:117] "RemoveContainer" containerID="68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e" Nov 21 16:37:10 crc kubenswrapper[4774]: E1121 16:37:10.280398 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e\": container with ID starting with 68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e not found: ID does not exist" containerID="68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.280418 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e"} err="failed to get container status \"68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e\": rpc error: code = NotFound desc = could not find container \"68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e\": container with ID starting with 68b4a4017968caee1553d92a2918e2ff649873b9c44f2efa24afdbd7235d991e not found: ID does not exist" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.280433 4774 scope.go:117] "RemoveContainer" containerID="5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.287234 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.289903 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.294350 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.295774 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.341126 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7-config-data\") pod \"nova-api-0\" (UID: \"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7\") " pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.341294 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce4fb5ae-af56-4695-b264-e399649045f5-logs\") pod \"nova-metadata-0\" (UID: \"ce4fb5ae-af56-4695-b264-e399649045f5\") " pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.341319 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7\") " pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.341348 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce4fb5ae-af56-4695-b264-e399649045f5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce4fb5ae-af56-4695-b264-e399649045f5\") " pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.341749 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7-logs\") pod \"nova-api-0\" (UID: \"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7\") " pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.341799 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppjkn\" (UniqueName: \"kubernetes.io/projected/fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7-kube-api-access-ppjkn\") pod \"nova-api-0\" (UID: \"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7\") " pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.341875 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce4fb5ae-af56-4695-b264-e399649045f5-config-data\") pod \"nova-metadata-0\" (UID: \"ce4fb5ae-af56-4695-b264-e399649045f5\") " pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.341892 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj4w6\" (UniqueName: \"kubernetes.io/projected/ce4fb5ae-af56-4695-b264-e399649045f5-kube-api-access-rj4w6\") pod \"nova-metadata-0\" (UID: \"ce4fb5ae-af56-4695-b264-e399649045f5\") " pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.410794 4774 scope.go:117] "RemoveContainer" containerID="9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af" 
Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.438066 4774 scope.go:117] "RemoveContainer" containerID="5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a" Nov 21 16:37:10 crc kubenswrapper[4774]: E1121 16:37:10.438482 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a\": container with ID starting with 5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a not found: ID does not exist" containerID="5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.438520 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a"} err="failed to get container status \"5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a\": rpc error: code = NotFound desc = could not find container \"5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a\": container with ID starting with 5658a902d636323a7b7d3f4b17e3a98dcf879c2d51757af799788e89ba63e15a not found: ID does not exist" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.438548 4774 scope.go:117] "RemoveContainer" containerID="9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af" Nov 21 16:37:10 crc kubenswrapper[4774]: E1121 16:37:10.438956 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af\": container with ID starting with 9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af not found: ID does not exist" containerID="9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.438980 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af"} err="failed to get container status \"9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af\": rpc error: code = NotFound desc = could not find container \"9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af\": container with ID starting with 9e706037b3f2c5a797f2877dab35863bae93e63aa57fd3cc03b8ed4b72d131af not found: ID does not exist" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.444280 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7-config-data\") pod \"nova-api-0\" (UID: \"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7\") " pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.444507 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce4fb5ae-af56-4695-b264-e399649045f5-logs\") pod \"nova-metadata-0\" (UID: \"ce4fb5ae-af56-4695-b264-e399649045f5\") " pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.444610 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7\") " pod="openstack/nova-api-0" Nov 21 16:37:10 crc 
kubenswrapper[4774]: I1121 16:37:10.444703 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce4fb5ae-af56-4695-b264-e399649045f5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce4fb5ae-af56-4695-b264-e399649045f5\") " pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.444879 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce4fb5ae-af56-4695-b264-e399649045f5-logs\") pod \"nova-metadata-0\" (UID: \"ce4fb5ae-af56-4695-b264-e399649045f5\") " pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.444896 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7-logs\") pod \"nova-api-0\" (UID: \"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7\") " pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.445006 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppjkn\" (UniqueName: \"kubernetes.io/projected/fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7-kube-api-access-ppjkn\") pod \"nova-api-0\" (UID: \"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7\") " pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.445111 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce4fb5ae-af56-4695-b264-e399649045f5-config-data\") pod \"nova-metadata-0\" (UID: \"ce4fb5ae-af56-4695-b264-e399649045f5\") " pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.445137 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj4w6\" (UniqueName: \"kubernetes.io/projected/ce4fb5ae-af56-4695-b264-e399649045f5-kube-api-access-rj4w6\") pod \"nova-metadata-0\" (UID: \"ce4fb5ae-af56-4695-b264-e399649045f5\") " pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.445584 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7-logs\") pod \"nova-api-0\" (UID: \"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7\") " pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.449118 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7-config-data\") pod \"nova-api-0\" (UID: \"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7\") " pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.449125 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce4fb5ae-af56-4695-b264-e399649045f5-config-data\") pod \"nova-metadata-0\" (UID: \"ce4fb5ae-af56-4695-b264-e399649045f5\") " pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.449688 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7\") " pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.451524 4774 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce4fb5ae-af56-4695-b264-e399649045f5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce4fb5ae-af56-4695-b264-e399649045f5\") " pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.466433 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj4w6\" (UniqueName: \"kubernetes.io/projected/ce4fb5ae-af56-4695-b264-e399649045f5-kube-api-access-rj4w6\") pod \"nova-metadata-0\" (UID: \"ce4fb5ae-af56-4695-b264-e399649045f5\") " pod="openstack/nova-metadata-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.466462 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppjkn\" (UniqueName: \"kubernetes.io/projected/fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7-kube-api-access-ppjkn\") pod \"nova-api-0\" (UID: \"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7\") " pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.715185 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Nov 21 16:37:10 crc kubenswrapper[4774]: I1121 16:37:10.741440 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Nov 21 16:37:11 crc kubenswrapper[4774]: E1121 16:37:11.015053 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba is running failed: container process not found" containerID="080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 16:37:11 crc kubenswrapper[4774]: E1121 16:37:11.015884 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba is running failed: container process not found" containerID="080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 16:37:11 crc kubenswrapper[4774]: E1121 16:37:11.016510 4774 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba is running failed: container process not found" containerID="080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Nov 21 16:37:11 crc kubenswrapper[4774]: E1121 16:37:11.016552 4774 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="19e323d5-4bb6-4769-aab0-fe396014cc08" containerName="nova-scheduler-scheduler" Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.114272 4774 generic.go:334] "Generic (PLEG): container finished" podID="19e323d5-4bb6-4769-aab0-fe396014cc08" containerID="080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba" exitCode=0 Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.114344 4774 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/nova-scheduler-0" event={"ID":"19e323d5-4bb6-4769-aab0-fe396014cc08","Type":"ContainerDied","Data":"080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba"} Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.120259 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f141d1f0-7f09-41e7-a3f1-b921ebcf68e0","Type":"ContainerStarted","Data":"aa1857901a9f06808e8aaab26a03883d1d68ad0a991b927fa1ba33a9f2669c8c"} Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.120305 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"f141d1f0-7f09-41e7-a3f1-b921ebcf68e0","Type":"ContainerStarted","Data":"8da5ad5e38bc562438d2a1f53cd7646c99e3571f49b9a779362e47f9f13009c8"} Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.120519 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.124046 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.139155 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.139137352 podStartE2EDuration="2.139137352s" podCreationTimestamp="2025-11-21 16:37:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 16:37:11.138398761 +0000 UTC m=+9221.790598020" watchObservedRunningTime="2025-11-21 16:37:11.139137352 +0000 UTC m=+9221.791336611" Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.208613 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.215357 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.263307 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lww96\" (UniqueName: \"kubernetes.io/projected/19e323d5-4bb6-4769-aab0-fe396014cc08-kube-api-access-lww96\") pod \"19e323d5-4bb6-4769-aab0-fe396014cc08\" (UID: \"19e323d5-4bb6-4769-aab0-fe396014cc08\") " Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.264326 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19e323d5-4bb6-4769-aab0-fe396014cc08-config-data\") pod \"19e323d5-4bb6-4769-aab0-fe396014cc08\" (UID: \"19e323d5-4bb6-4769-aab0-fe396014cc08\") " Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.264468 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19e323d5-4bb6-4769-aab0-fe396014cc08-combined-ca-bundle\") pod \"19e323d5-4bb6-4769-aab0-fe396014cc08\" (UID: \"19e323d5-4bb6-4769-aab0-fe396014cc08\") " Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.271038 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19e323d5-4bb6-4769-aab0-fe396014cc08-kube-api-access-lww96" (OuterVolumeSpecName: "kube-api-access-lww96") pod "19e323d5-4bb6-4769-aab0-fe396014cc08" (UID: "19e323d5-4bb6-4769-aab0-fe396014cc08"). InnerVolumeSpecName "kube-api-access-lww96". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.294192 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19e323d5-4bb6-4769-aab0-fe396014cc08-config-data" (OuterVolumeSpecName: "config-data") pod "19e323d5-4bb6-4769-aab0-fe396014cc08" (UID: "19e323d5-4bb6-4769-aab0-fe396014cc08"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.311136 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19e323d5-4bb6-4769-aab0-fe396014cc08-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "19e323d5-4bb6-4769-aab0-fe396014cc08" (UID: "19e323d5-4bb6-4769-aab0-fe396014cc08"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.371217 4774 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19e323d5-4bb6-4769-aab0-fe396014cc08-config-data\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.371253 4774 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19e323d5-4bb6-4769-aab0-fe396014cc08-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.371265 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lww96\" (UniqueName: \"kubernetes.io/projected/19e323d5-4bb6-4769-aab0-fe396014cc08-kube-api-access-lww96\") on node \"crc\" DevicePath \"\"" Nov 21 16:37:11 crc kubenswrapper[4774]: I1121 16:37:11.473570 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.105431 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21fde3a8-382a-42fe-863a-2c02cb7ccc90" path="/var/lib/kubelet/pods/21fde3a8-382a-42fe-863a-2c02cb7ccc90/volumes" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.106447 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5978983d-ed01-4414-a4ac-bd04b249957b" path="/var/lib/kubelet/pods/5978983d-ed01-4414-a4ac-bd04b249957b/volumes" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.137527 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.137557 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"19e323d5-4bb6-4769-aab0-fe396014cc08","Type":"ContainerDied","Data":"6aa0c5ea12eb4cbc6504e07c0e24e9a2aa448c1b6c268cb945b7411d0b9d91a9"} Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.137608 4774 scope.go:117] "RemoveContainer" containerID="080949d5da8616bfdac9b1ea273ff6e26eaeb36dfadda477fe803eb9bc3f7fba" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.139162 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce4fb5ae-af56-4695-b264-e399649045f5","Type":"ContainerStarted","Data":"be9861b781ec31f0596c95d443c8c6abf37872a83daa0a411417c6df57d04be7"} Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.142667 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7","Type":"ContainerStarted","Data":"63efec93807a26b41657eccd7137be72ada1f582299d72a15014c9e3ea01b5c7"} Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.143090 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7","Type":"ContainerStarted","Data":"e8d9946d49e4c543412271a7a90f40b6ce466f16c428a223d5eadd4580069b88"} Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.175208 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.187755 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.202439 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 16:37:12 crc kubenswrapper[4774]: E1121 16:37:12.203092 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19e323d5-4bb6-4769-aab0-fe396014cc08" containerName="nova-scheduler-scheduler" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.203118 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="19e323d5-4bb6-4769-aab0-fe396014cc08" containerName="nova-scheduler-scheduler" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.203373 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="19e323d5-4bb6-4769-aab0-fe396014cc08" containerName="nova-scheduler-scheduler" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.204464 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.208731 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.215503 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.293195 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5083fc02-32d1-460e-899b-1d2e383296c5-config-data\") pod \"nova-scheduler-0\" (UID: \"5083fc02-32d1-460e-899b-1d2e383296c5\") " pod="openstack/nova-scheduler-0" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.293265 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsk52\" (UniqueName: \"kubernetes.io/projected/5083fc02-32d1-460e-899b-1d2e383296c5-kube-api-access-hsk52\") pod \"nova-scheduler-0\" (UID: \"5083fc02-32d1-460e-899b-1d2e383296c5\") " pod="openstack/nova-scheduler-0" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.293372 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5083fc02-32d1-460e-899b-1d2e383296c5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5083fc02-32d1-460e-899b-1d2e383296c5\") " pod="openstack/nova-scheduler-0" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.395493 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5083fc02-32d1-460e-899b-1d2e383296c5-config-data\") pod \"nova-scheduler-0\" (UID: \"5083fc02-32d1-460e-899b-1d2e383296c5\") " pod="openstack/nova-scheduler-0" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.395581 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsk52\" (UniqueName: \"kubernetes.io/projected/5083fc02-32d1-460e-899b-1d2e383296c5-kube-api-access-hsk52\") pod \"nova-scheduler-0\" (UID: \"5083fc02-32d1-460e-899b-1d2e383296c5\") " pod="openstack/nova-scheduler-0" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.395718 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5083fc02-32d1-460e-899b-1d2e383296c5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5083fc02-32d1-460e-899b-1d2e383296c5\") " pod="openstack/nova-scheduler-0" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.399808 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5083fc02-32d1-460e-899b-1d2e383296c5-config-data\") pod \"nova-scheduler-0\" (UID: \"5083fc02-32d1-460e-899b-1d2e383296c5\") " pod="openstack/nova-scheduler-0" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.400610 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5083fc02-32d1-460e-899b-1d2e383296c5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5083fc02-32d1-460e-899b-1d2e383296c5\") " pod="openstack/nova-scheduler-0" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.410852 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsk52\" (UniqueName: 
\"kubernetes.io/projected/5083fc02-32d1-460e-899b-1d2e383296c5-kube-api-access-hsk52\") pod \"nova-scheduler-0\" (UID: \"5083fc02-32d1-460e-899b-1d2e383296c5\") " pod="openstack/nova-scheduler-0" Nov 21 16:37:12 crc kubenswrapper[4774]: I1121 16:37:12.531937 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Nov 21 16:37:13 crc kubenswrapper[4774]: I1121 16:37:13.004181 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Nov 21 16:37:13 crc kubenswrapper[4774]: W1121 16:37:13.010448 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5083fc02_32d1_460e_899b_1d2e383296c5.slice/crio-bb7e99493f0094e594168f77d037d9bf5a92bbe0ea4409d03abf5c83838da6b9 WatchSource:0}: Error finding container bb7e99493f0094e594168f77d037d9bf5a92bbe0ea4409d03abf5c83838da6b9: Status 404 returned error can't find the container with id bb7e99493f0094e594168f77d037d9bf5a92bbe0ea4409d03abf5c83838da6b9 Nov 21 16:37:13 crc kubenswrapper[4774]: I1121 16:37:13.155838 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5083fc02-32d1-460e-899b-1d2e383296c5","Type":"ContainerStarted","Data":"bb7e99493f0094e594168f77d037d9bf5a92bbe0ea4409d03abf5c83838da6b9"} Nov 21 16:37:13 crc kubenswrapper[4774]: I1121 16:37:13.158682 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce4fb5ae-af56-4695-b264-e399649045f5","Type":"ContainerStarted","Data":"eaea6e4e4d8bd0c83ff5e6faf60b1bb7b53e0cb747a21701c04dfe4c9e421013"} Nov 21 16:37:13 crc kubenswrapper[4774]: I1121 16:37:13.158734 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"ce4fb5ae-af56-4695-b264-e399649045f5","Type":"ContainerStarted","Data":"b83aeeb72011e8dcb717ecde4f9ebc414f1769edbab6a05902c302364da571dc"} Nov 21 16:37:13 crc kubenswrapper[4774]: I1121 16:37:13.182467 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7","Type":"ContainerStarted","Data":"0d9b8bc289c90be40d9284518b4f865a200673779b07d2b4e8288eacdf4c3da1"} Nov 21 16:37:13 crc kubenswrapper[4774]: I1121 16:37:13.192068 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.192048594 podStartE2EDuration="3.192048594s" podCreationTimestamp="2025-11-21 16:37:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 16:37:13.190940762 +0000 UTC m=+9223.843140041" watchObservedRunningTime="2025-11-21 16:37:13.192048594 +0000 UTC m=+9223.844247863" Nov 21 16:37:13 crc kubenswrapper[4774]: I1121 16:37:13.226611 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.226594102 podStartE2EDuration="3.226594102s" podCreationTimestamp="2025-11-21 16:37:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 16:37:13.20802004 +0000 UTC m=+9223.860219309" watchObservedRunningTime="2025-11-21 16:37:13.226594102 +0000 UTC m=+9223.878793361" Nov 21 16:37:14 crc kubenswrapper[4774]: I1121 16:37:14.119614 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19e323d5-4bb6-4769-aab0-fe396014cc08" 
path="/var/lib/kubelet/pods/19e323d5-4bb6-4769-aab0-fe396014cc08/volumes" Nov 21 16:37:14 crc kubenswrapper[4774]: I1121 16:37:14.199395 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5083fc02-32d1-460e-899b-1d2e383296c5","Type":"ContainerStarted","Data":"00fc5f7cafb5a206d8341f03df8955990e572c2f8371b1427417c307dc9bb69e"} Nov 21 16:37:14 crc kubenswrapper[4774]: I1121 16:37:14.229808 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.229786071 podStartE2EDuration="2.229786071s" podCreationTimestamp="2025-11-21 16:37:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-21 16:37:14.220489475 +0000 UTC m=+9224.872688784" watchObservedRunningTime="2025-11-21 16:37:14.229786071 +0000 UTC m=+9224.881985330" Nov 21 16:37:15 crc kubenswrapper[4774]: I1121 16:37:15.741681 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 16:37:15 crc kubenswrapper[4774]: I1121 16:37:15.742029 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Nov 21 16:37:17 crc kubenswrapper[4774]: I1121 16:37:17.532951 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Nov 21 16:37:18 crc kubenswrapper[4774]: I1121 16:37:18.512467 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Nov 21 16:37:19 crc kubenswrapper[4774]: I1121 16:37:19.599149 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Nov 21 16:37:20 crc kubenswrapper[4774]: I1121 16:37:20.715753 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 16:37:20 crc kubenswrapper[4774]: I1121 16:37:20.717033 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Nov 21 16:37:20 crc kubenswrapper[4774]: I1121 16:37:20.742523 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 21 16:37:20 crc kubenswrapper[4774]: I1121 16:37:20.742579 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Nov 21 16:37:21 crc kubenswrapper[4774]: I1121 16:37:21.798147 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 16:37:21 crc kubenswrapper[4774]: I1121 16:37:21.881535 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 16:37:21 crc kubenswrapper[4774]: I1121 16:37:21.881835 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ce4fb5ae-af56-4695-b264-e399649045f5" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.193:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" 
Nov 21 16:37:21 crc kubenswrapper[4774]: I1121 16:37:21.881931 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="ce4fb5ae-af56-4695-b264-e399649045f5" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.193:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 21 16:37:22 crc kubenswrapper[4774]: I1121 16:37:22.532573 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Nov 21 16:37:22 crc kubenswrapper[4774]: I1121 16:37:22.565059 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Nov 21 16:37:23 crc kubenswrapper[4774]: I1121 16:37:23.337478 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Nov 21 16:37:30 crc kubenswrapper[4774]: I1121 16:37:30.721292 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 21 16:37:30 crc kubenswrapper[4774]: I1121 16:37:30.721980 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Nov 21 16:37:30 crc kubenswrapper[4774]: I1121 16:37:30.722600 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 21 16:37:30 crc kubenswrapper[4774]: I1121 16:37:30.723003 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Nov 21 16:37:30 crc kubenswrapper[4774]: I1121 16:37:30.726151 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 21 16:37:30 crc kubenswrapper[4774]: I1121 16:37:30.727565 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Nov 21 16:37:30 crc kubenswrapper[4774]: I1121 16:37:30.748090 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 21 16:37:30 crc kubenswrapper[4774]: I1121 16:37:30.748165 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Nov 21 16:37:30 crc kubenswrapper[4774]: I1121 16:37:30.750961 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 21 16:37:30 crc kubenswrapper[4774]: I1121 16:37:30.751094 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.900539 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l"] Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.902293 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.905437 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.905870 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.906043 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.906204 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-adoption-secret" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.906321 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-dockercfg-qqhx4" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.906467 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-cells-global-config" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.906582 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-cell1" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.915314 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l"] Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.962349 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.962435 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.962466 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.962539 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 
16:37:31.962567 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.962595 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.962641 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.962720 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.962755 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.962783 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prpvz\" (UniqueName: \"kubernetes.io/projected/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-kube-api-access-prpvz\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:31 crc kubenswrapper[4774]: I1121 16:37:31.962808 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.064902 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.065117 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.065166 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.065321 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.065404 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.065512 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.065620 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.065686 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " 
pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.065734 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.065777 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prpvz\" (UniqueName: \"kubernetes.io/projected/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-kube-api-access-prpvz\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.065814 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-ceph\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.066998 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cells-global-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.067556 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cells-global-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.071035 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-migration-ssh-key-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.071148 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-migration-ssh-key-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.071545 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-ceph\") pod 
\"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.071668 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-ssh-key\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.071698 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-compute-config-0\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.072575 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-inventory\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.073151 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-compute-config-1\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.073719 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-combined-ca-bundle\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.083940 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prpvz\" (UniqueName: \"kubernetes.io/projected/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-kube-api-access-prpvz\") pod \"nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.236010 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:37:32 crc kubenswrapper[4774]: I1121 16:37:32.768345 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l"] Nov 21 16:37:33 crc kubenswrapper[4774]: I1121 16:37:33.404779 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" event={"ID":"d79df006-a98f-45ed-9ef1-ddbf5e3143c5","Type":"ContainerStarted","Data":"11a0268d070af2fac8a2dd23b257888c6c3cbd8596045b2cf0f0c723c6da6e90"} Nov 21 16:37:34 crc kubenswrapper[4774]: I1121 16:37:34.419251 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" event={"ID":"d79df006-a98f-45ed-9ef1-ddbf5e3143c5","Type":"ContainerStarted","Data":"8bb386911c37f84b286ec05da02b55239c3ead476ca32124642a255b2da3e4c0"} Nov 21 16:37:34 crc kubenswrapper[4774]: I1121 16:37:34.444234 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" podStartSLOduration=2.895831748 podStartE2EDuration="3.444216552s" podCreationTimestamp="2025-11-21 16:37:31 +0000 UTC" firstStartedPulling="2025-11-21 16:37:32.776688802 +0000 UTC m=+9243.428888061" lastFinishedPulling="2025-11-21 16:37:33.325073606 +0000 UTC m=+9243.977272865" observedRunningTime="2025-11-21 16:37:34.437350356 +0000 UTC m=+9245.089549615" watchObservedRunningTime="2025-11-21 16:37:34.444216552 +0000 UTC m=+9245.096415811" Nov 21 16:38:29 crc kubenswrapper[4774]: I1121 16:38:29.600358 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:38:29 crc kubenswrapper[4774]: I1121 16:38:29.601022 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:38:59 crc kubenswrapper[4774]: I1121 16:38:59.601431 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:38:59 crc kubenswrapper[4774]: I1121 16:38:59.602250 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.162068 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-f8nkd"] Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.165687 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.185417 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f8nkd"] Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.218474 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/216b93ec-f26a-4918-b694-2d9ebb78870e-utilities\") pod \"redhat-marketplace-f8nkd\" (UID: \"216b93ec-f26a-4918-b694-2d9ebb78870e\") " pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.218795 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22d45\" (UniqueName: \"kubernetes.io/projected/216b93ec-f26a-4918-b694-2d9ebb78870e-kube-api-access-22d45\") pod \"redhat-marketplace-f8nkd\" (UID: \"216b93ec-f26a-4918-b694-2d9ebb78870e\") " pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.219072 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/216b93ec-f26a-4918-b694-2d9ebb78870e-catalog-content\") pod \"redhat-marketplace-f8nkd\" (UID: \"216b93ec-f26a-4918-b694-2d9ebb78870e\") " pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.322096 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/216b93ec-f26a-4918-b694-2d9ebb78870e-catalog-content\") pod \"redhat-marketplace-f8nkd\" (UID: \"216b93ec-f26a-4918-b694-2d9ebb78870e\") " pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.322249 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/216b93ec-f26a-4918-b694-2d9ebb78870e-utilities\") pod \"redhat-marketplace-f8nkd\" (UID: \"216b93ec-f26a-4918-b694-2d9ebb78870e\") " pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.322292 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22d45\" (UniqueName: \"kubernetes.io/projected/216b93ec-f26a-4918-b694-2d9ebb78870e-kube-api-access-22d45\") pod \"redhat-marketplace-f8nkd\" (UID: \"216b93ec-f26a-4918-b694-2d9ebb78870e\") " pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.322542 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/216b93ec-f26a-4918-b694-2d9ebb78870e-catalog-content\") pod \"redhat-marketplace-f8nkd\" (UID: \"216b93ec-f26a-4918-b694-2d9ebb78870e\") " pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.322626 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/216b93ec-f26a-4918-b694-2d9ebb78870e-utilities\") pod \"redhat-marketplace-f8nkd\" (UID: \"216b93ec-f26a-4918-b694-2d9ebb78870e\") " pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.352473 4774 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-22d45\" (UniqueName: \"kubernetes.io/projected/216b93ec-f26a-4918-b694-2d9ebb78870e-kube-api-access-22d45\") pod \"redhat-marketplace-f8nkd\" (UID: \"216b93ec-f26a-4918-b694-2d9ebb78870e\") " pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.490267 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:17 crc kubenswrapper[4774]: I1121 16:39:17.971515 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-f8nkd"] Nov 21 16:39:17 crc kubenswrapper[4774]: W1121 16:39:17.976210 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod216b93ec_f26a_4918_b694_2d9ebb78870e.slice/crio-6838e0c68b2c51d9bda462f18b260479edc94f6f9adc3df8f9c455b581f044ab WatchSource:0}: Error finding container 6838e0c68b2c51d9bda462f18b260479edc94f6f9adc3df8f9c455b581f044ab: Status 404 returned error can't find the container with id 6838e0c68b2c51d9bda462f18b260479edc94f6f9adc3df8f9c455b581f044ab Nov 21 16:39:18 crc kubenswrapper[4774]: I1121 16:39:18.585433 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8nkd" event={"ID":"216b93ec-f26a-4918-b694-2d9ebb78870e","Type":"ContainerStarted","Data":"6838e0c68b2c51d9bda462f18b260479edc94f6f9adc3df8f9c455b581f044ab"} Nov 21 16:39:19 crc kubenswrapper[4774]: I1121 16:39:19.600611 4774 generic.go:334] "Generic (PLEG): container finished" podID="216b93ec-f26a-4918-b694-2d9ebb78870e" containerID="02adbc1c24adae53c95ae6b201bed9d053e2daea06d4091062ac84854ee999c2" exitCode=0 Nov 21 16:39:19 crc kubenswrapper[4774]: I1121 16:39:19.600671 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8nkd" event={"ID":"216b93ec-f26a-4918-b694-2d9ebb78870e","Type":"ContainerDied","Data":"02adbc1c24adae53c95ae6b201bed9d053e2daea06d4091062ac84854ee999c2"} Nov 21 16:39:19 crc kubenswrapper[4774]: I1121 16:39:19.603221 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 16:39:21 crc kubenswrapper[4774]: I1121 16:39:21.622555 4774 generic.go:334] "Generic (PLEG): container finished" podID="216b93ec-f26a-4918-b694-2d9ebb78870e" containerID="db31fb97555d83ae9be730430a596a1c7c4f01db6ea5e36d57ff77e7fbdf1ff0" exitCode=0 Nov 21 16:39:21 crc kubenswrapper[4774]: I1121 16:39:21.622646 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8nkd" event={"ID":"216b93ec-f26a-4918-b694-2d9ebb78870e","Type":"ContainerDied","Data":"db31fb97555d83ae9be730430a596a1c7c4f01db6ea5e36d57ff77e7fbdf1ff0"} Nov 21 16:39:22 crc kubenswrapper[4774]: I1121 16:39:22.634574 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8nkd" event={"ID":"216b93ec-f26a-4918-b694-2d9ebb78870e","Type":"ContainerStarted","Data":"f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34"} Nov 21 16:39:22 crc kubenswrapper[4774]: I1121 16:39:22.657961 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-f8nkd" podStartSLOduration=3.241484565 podStartE2EDuration="5.657940344s" podCreationTimestamp="2025-11-21 16:39:17 +0000 UTC" firstStartedPulling="2025-11-21 16:39:19.602906573 +0000 UTC m=+9350.255105832" 
lastFinishedPulling="2025-11-21 16:39:22.019362352 +0000 UTC m=+9352.671561611" observedRunningTime="2025-11-21 16:39:22.648287408 +0000 UTC m=+9353.300486737" watchObservedRunningTime="2025-11-21 16:39:22.657940344 +0000 UTC m=+9353.310139613" Nov 21 16:39:27 crc kubenswrapper[4774]: I1121 16:39:27.491511 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:27 crc kubenswrapper[4774]: I1121 16:39:27.492803 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:27 crc kubenswrapper[4774]: I1121 16:39:27.575592 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:27 crc kubenswrapper[4774]: I1121 16:39:27.748136 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:27 crc kubenswrapper[4774]: I1121 16:39:27.819087 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f8nkd"] Nov 21 16:39:29 crc kubenswrapper[4774]: I1121 16:39:29.601119 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:39:29 crc kubenswrapper[4774]: I1121 16:39:29.601536 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:39:29 crc kubenswrapper[4774]: I1121 16:39:29.601591 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" Nov 21 16:39:29 crc kubenswrapper[4774]: I1121 16:39:29.602249 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 21 16:39:29 crc kubenswrapper[4774]: I1121 16:39:29.602316 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" gracePeriod=600 Nov 21 16:39:29 crc kubenswrapper[4774]: I1121 16:39:29.709910 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-f8nkd" podUID="216b93ec-f26a-4918-b694-2d9ebb78870e" containerName="registry-server" containerID="cri-o://f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34" gracePeriod=2 Nov 21 16:39:29 crc kubenswrapper[4774]: E1121 16:39:29.731412 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.195563 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.307625 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/216b93ec-f26a-4918-b694-2d9ebb78870e-utilities\") pod \"216b93ec-f26a-4918-b694-2d9ebb78870e\" (UID: \"216b93ec-f26a-4918-b694-2d9ebb78870e\") " Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.307779 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22d45\" (UniqueName: \"kubernetes.io/projected/216b93ec-f26a-4918-b694-2d9ebb78870e-kube-api-access-22d45\") pod \"216b93ec-f26a-4918-b694-2d9ebb78870e\" (UID: \"216b93ec-f26a-4918-b694-2d9ebb78870e\") " Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.307810 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/216b93ec-f26a-4918-b694-2d9ebb78870e-catalog-content\") pod \"216b93ec-f26a-4918-b694-2d9ebb78870e\" (UID: \"216b93ec-f26a-4918-b694-2d9ebb78870e\") " Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.308426 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/216b93ec-f26a-4918-b694-2d9ebb78870e-utilities" (OuterVolumeSpecName: "utilities") pod "216b93ec-f26a-4918-b694-2d9ebb78870e" (UID: "216b93ec-f26a-4918-b694-2d9ebb78870e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.324848 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/216b93ec-f26a-4918-b694-2d9ebb78870e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "216b93ec-f26a-4918-b694-2d9ebb78870e" (UID: "216b93ec-f26a-4918-b694-2d9ebb78870e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.410776 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/216b93ec-f26a-4918-b694-2d9ebb78870e-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.410810 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/216b93ec-f26a-4918-b694-2d9ebb78870e-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.724337 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" exitCode=0 Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.724454 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28"} Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.724782 4774 scope.go:117] "RemoveContainer" containerID="d0d0834ceda657ad63e78a2aa689d7cc74d9dfb1a14f0a81d702525608dd8b6b" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.726004 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:39:30 crc kubenswrapper[4774]: E1121 16:39:30.726637 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.728175 4774 generic.go:334] "Generic (PLEG): container finished" podID="216b93ec-f26a-4918-b694-2d9ebb78870e" containerID="f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34" exitCode=0 Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.728226 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8nkd" event={"ID":"216b93ec-f26a-4918-b694-2d9ebb78870e","Type":"ContainerDied","Data":"f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34"} Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.728252 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-f8nkd" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.728264 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-f8nkd" event={"ID":"216b93ec-f26a-4918-b694-2d9ebb78870e","Type":"ContainerDied","Data":"6838e0c68b2c51d9bda462f18b260479edc94f6f9adc3df8f9c455b581f044ab"} Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.781939 4774 scope.go:117] "RemoveContainer" containerID="f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.812068 4774 scope.go:117] "RemoveContainer" containerID="db31fb97555d83ae9be730430a596a1c7c4f01db6ea5e36d57ff77e7fbdf1ff0" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.907987 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/216b93ec-f26a-4918-b694-2d9ebb78870e-kube-api-access-22d45" (OuterVolumeSpecName: "kube-api-access-22d45") pod "216b93ec-f26a-4918-b694-2d9ebb78870e" (UID: "216b93ec-f26a-4918-b694-2d9ebb78870e"). InnerVolumeSpecName "kube-api-access-22d45". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.923668 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22d45\" (UniqueName: \"kubernetes.io/projected/216b93ec-f26a-4918-b694-2d9ebb78870e-kube-api-access-22d45\") on node \"crc\" DevicePath \"\"" Nov 21 16:39:30 crc kubenswrapper[4774]: I1121 16:39:30.929950 4774 scope.go:117] "RemoveContainer" containerID="02adbc1c24adae53c95ae6b201bed9d053e2daea06d4091062ac84854ee999c2" Nov 21 16:39:31 crc kubenswrapper[4774]: I1121 16:39:31.014142 4774 scope.go:117] "RemoveContainer" containerID="f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34" Nov 21 16:39:31 crc kubenswrapper[4774]: E1121 16:39:31.014479 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34\": container with ID starting with f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34 not found: ID does not exist" containerID="f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34" Nov 21 16:39:31 crc kubenswrapper[4774]: I1121 16:39:31.014717 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34"} err="failed to get container status \"f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34\": rpc error: code = NotFound desc = could not find container \"f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34\": container with ID starting with f807de37a8585b0e69827aff843c6f9404fc2f815aeabe25554625acf8553a34 not found: ID does not exist" Nov 21 16:39:31 crc kubenswrapper[4774]: I1121 16:39:31.014748 4774 scope.go:117] "RemoveContainer" containerID="db31fb97555d83ae9be730430a596a1c7c4f01db6ea5e36d57ff77e7fbdf1ff0" Nov 21 16:39:31 crc kubenswrapper[4774]: E1121 16:39:31.014989 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db31fb97555d83ae9be730430a596a1c7c4f01db6ea5e36d57ff77e7fbdf1ff0\": container with ID starting with db31fb97555d83ae9be730430a596a1c7c4f01db6ea5e36d57ff77e7fbdf1ff0 not found: ID does not exist" 
containerID="db31fb97555d83ae9be730430a596a1c7c4f01db6ea5e36d57ff77e7fbdf1ff0" Nov 21 16:39:31 crc kubenswrapper[4774]: I1121 16:39:31.015007 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db31fb97555d83ae9be730430a596a1c7c4f01db6ea5e36d57ff77e7fbdf1ff0"} err="failed to get container status \"db31fb97555d83ae9be730430a596a1c7c4f01db6ea5e36d57ff77e7fbdf1ff0\": rpc error: code = NotFound desc = could not find container \"db31fb97555d83ae9be730430a596a1c7c4f01db6ea5e36d57ff77e7fbdf1ff0\": container with ID starting with db31fb97555d83ae9be730430a596a1c7c4f01db6ea5e36d57ff77e7fbdf1ff0 not found: ID does not exist" Nov 21 16:39:31 crc kubenswrapper[4774]: I1121 16:39:31.015020 4774 scope.go:117] "RemoveContainer" containerID="02adbc1c24adae53c95ae6b201bed9d053e2daea06d4091062ac84854ee999c2" Nov 21 16:39:31 crc kubenswrapper[4774]: E1121 16:39:31.015281 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02adbc1c24adae53c95ae6b201bed9d053e2daea06d4091062ac84854ee999c2\": container with ID starting with 02adbc1c24adae53c95ae6b201bed9d053e2daea06d4091062ac84854ee999c2 not found: ID does not exist" containerID="02adbc1c24adae53c95ae6b201bed9d053e2daea06d4091062ac84854ee999c2" Nov 21 16:39:31 crc kubenswrapper[4774]: I1121 16:39:31.015332 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02adbc1c24adae53c95ae6b201bed9d053e2daea06d4091062ac84854ee999c2"} err="failed to get container status \"02adbc1c24adae53c95ae6b201bed9d053e2daea06d4091062ac84854ee999c2\": rpc error: code = NotFound desc = could not find container \"02adbc1c24adae53c95ae6b201bed9d053e2daea06d4091062ac84854ee999c2\": container with ID starting with 02adbc1c24adae53c95ae6b201bed9d053e2daea06d4091062ac84854ee999c2 not found: ID does not exist" Nov 21 16:39:31 crc kubenswrapper[4774]: I1121 16:39:31.073716 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-f8nkd"] Nov 21 16:39:31 crc kubenswrapper[4774]: I1121 16:39:31.082800 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-f8nkd"] Nov 21 16:39:32 crc kubenswrapper[4774]: I1121 16:39:32.108189 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="216b93ec-f26a-4918-b694-2d9ebb78870e" path="/var/lib/kubelet/pods/216b93ec-f26a-4918-b694-2d9ebb78870e/volumes" Nov 21 16:39:44 crc kubenswrapper[4774]: I1121 16:39:44.094265 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:39:44 crc kubenswrapper[4774]: E1121 16:39:44.095225 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:39:58 crc kubenswrapper[4774]: I1121 16:39:58.094075 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:39:58 crc kubenswrapper[4774]: E1121 16:39:58.094976 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:40:11 crc kubenswrapper[4774]: I1121 16:40:11.093236 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:40:11 crc kubenswrapper[4774]: E1121 16:40:11.094095 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:40:22 crc kubenswrapper[4774]: I1121 16:40:22.093747 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:40:22 crc kubenswrapper[4774]: E1121 16:40:22.094836 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:40:34 crc kubenswrapper[4774]: I1121 16:40:34.093163 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:40:34 crc kubenswrapper[4774]: E1121 16:40:34.094068 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:40:47 crc kubenswrapper[4774]: I1121 16:40:47.094243 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:40:47 crc kubenswrapper[4774]: E1121 16:40:47.095082 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:41:02 crc kubenswrapper[4774]: I1121 16:41:02.092995 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:41:02 crc kubenswrapper[4774]: E1121 16:41:02.093900 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:41:17 crc kubenswrapper[4774]: I1121 16:41:17.092468 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:41:17 crc kubenswrapper[4774]: E1121 16:41:17.093271 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:41:32 crc kubenswrapper[4774]: I1121 16:41:32.093095 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:41:32 crc kubenswrapper[4774]: E1121 16:41:32.094150 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:41:45 crc kubenswrapper[4774]: I1121 16:41:45.093259 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:41:45 crc kubenswrapper[4774]: E1121 16:41:45.094079 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:41:57 crc kubenswrapper[4774]: I1121 16:41:57.095500 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:41:57 crc kubenswrapper[4774]: E1121 16:41:57.096900 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:42:07 crc kubenswrapper[4774]: I1121 16:42:07.822742 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-55ptr"] Nov 21 16:42:07 crc kubenswrapper[4774]: E1121 16:42:07.832435 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="216b93ec-f26a-4918-b694-2d9ebb78870e" containerName="registry-server" Nov 21 16:42:07 crc kubenswrapper[4774]: I1121 16:42:07.832464 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="216b93ec-f26a-4918-b694-2d9ebb78870e" containerName="registry-server" Nov 21 16:42:07 crc kubenswrapper[4774]: E1121 16:42:07.832547 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="216b93ec-f26a-4918-b694-2d9ebb78870e" 
containerName="extract-content" Nov 21 16:42:07 crc kubenswrapper[4774]: I1121 16:42:07.832557 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="216b93ec-f26a-4918-b694-2d9ebb78870e" containerName="extract-content" Nov 21 16:42:07 crc kubenswrapper[4774]: E1121 16:42:07.832599 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="216b93ec-f26a-4918-b694-2d9ebb78870e" containerName="extract-utilities" Nov 21 16:42:07 crc kubenswrapper[4774]: I1121 16:42:07.832613 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="216b93ec-f26a-4918-b694-2d9ebb78870e" containerName="extract-utilities" Nov 21 16:42:07 crc kubenswrapper[4774]: I1121 16:42:07.833796 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="216b93ec-f26a-4918-b694-2d9ebb78870e" containerName="registry-server" Nov 21 16:42:07 crc kubenswrapper[4774]: I1121 16:42:07.856084 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:07 crc kubenswrapper[4774]: I1121 16:42:07.874036 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-55ptr"] Nov 21 16:42:07 crc kubenswrapper[4774]: I1121 16:42:07.950861 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbv9d\" (UniqueName: \"kubernetes.io/projected/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-kube-api-access-vbv9d\") pod \"community-operators-55ptr\" (UID: \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\") " pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:07 crc kubenswrapper[4774]: I1121 16:42:07.951359 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-utilities\") pod \"community-operators-55ptr\" (UID: \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\") " pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:07 crc kubenswrapper[4774]: I1121 16:42:07.951488 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-catalog-content\") pod \"community-operators-55ptr\" (UID: \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\") " pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:08 crc kubenswrapper[4774]: I1121 16:42:08.052919 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-utilities\") pod \"community-operators-55ptr\" (UID: \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\") " pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:08 crc kubenswrapper[4774]: I1121 16:42:08.053016 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-catalog-content\") pod \"community-operators-55ptr\" (UID: \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\") " pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:08 crc kubenswrapper[4774]: I1121 16:42:08.053096 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbv9d\" (UniqueName: \"kubernetes.io/projected/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-kube-api-access-vbv9d\") pod \"community-operators-55ptr\" (UID: 
\"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\") " pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:08 crc kubenswrapper[4774]: I1121 16:42:08.053431 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-utilities\") pod \"community-operators-55ptr\" (UID: \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\") " pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:08 crc kubenswrapper[4774]: I1121 16:42:08.053843 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-catalog-content\") pod \"community-operators-55ptr\" (UID: \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\") " pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:08 crc kubenswrapper[4774]: I1121 16:42:08.070971 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbv9d\" (UniqueName: \"kubernetes.io/projected/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-kube-api-access-vbv9d\") pod \"community-operators-55ptr\" (UID: \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\") " pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:08 crc kubenswrapper[4774]: I1121 16:42:08.187322 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:08 crc kubenswrapper[4774]: I1121 16:42:08.715247 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-55ptr"] Nov 21 16:42:09 crc kubenswrapper[4774]: I1121 16:42:09.423411 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-55ptr" event={"ID":"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1","Type":"ContainerStarted","Data":"9c1611a214a3553ba1e399fd8a4b6475a342bf3f135352c2054dac5208633f59"} Nov 21 16:42:10 crc kubenswrapper[4774]: I1121 16:42:10.434586 4774 generic.go:334] "Generic (PLEG): container finished" podID="4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" containerID="93dd041e5a58594e85c6dc48469d07668b1b94c0c659093622ceb990a4e9f70d" exitCode=0 Nov 21 16:42:10 crc kubenswrapper[4774]: I1121 16:42:10.434758 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-55ptr" event={"ID":"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1","Type":"ContainerDied","Data":"93dd041e5a58594e85c6dc48469d07668b1b94c0c659093622ceb990a4e9f70d"} Nov 21 16:42:12 crc kubenswrapper[4774]: I1121 16:42:12.093160 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:42:12 crc kubenswrapper[4774]: E1121 16:42:12.093948 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:42:12 crc kubenswrapper[4774]: I1121 16:42:12.461017 4774 generic.go:334] "Generic (PLEG): container finished" podID="4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" containerID="4fd57a502ff1a2b5e79c0e889741523ab0805b74a473a32d4c2b5a9ba388bee6" exitCode=0 Nov 21 16:42:12 crc kubenswrapper[4774]: I1121 16:42:12.461121 4774 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/community-operators-55ptr" event={"ID":"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1","Type":"ContainerDied","Data":"4fd57a502ff1a2b5e79c0e889741523ab0805b74a473a32d4c2b5a9ba388bee6"} Nov 21 16:42:13 crc kubenswrapper[4774]: I1121 16:42:13.478026 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-55ptr" event={"ID":"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1","Type":"ContainerStarted","Data":"f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2"} Nov 21 16:42:13 crc kubenswrapper[4774]: I1121 16:42:13.499798 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-55ptr" podStartSLOduration=4.086569034 podStartE2EDuration="6.499780557s" podCreationTimestamp="2025-11-21 16:42:07 +0000 UTC" firstStartedPulling="2025-11-21 16:42:10.438282273 +0000 UTC m=+9521.090481532" lastFinishedPulling="2025-11-21 16:42:12.851493796 +0000 UTC m=+9523.503693055" observedRunningTime="2025-11-21 16:42:13.493315873 +0000 UTC m=+9524.145515142" watchObservedRunningTime="2025-11-21 16:42:13.499780557 +0000 UTC m=+9524.151979816" Nov 21 16:42:18 crc kubenswrapper[4774]: I1121 16:42:18.188314 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:18 crc kubenswrapper[4774]: I1121 16:42:18.189057 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:18 crc kubenswrapper[4774]: I1121 16:42:18.244721 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:18 crc kubenswrapper[4774]: I1121 16:42:18.582390 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:18 crc kubenswrapper[4774]: I1121 16:42:18.628939 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-55ptr"] Nov 21 16:42:20 crc kubenswrapper[4774]: I1121 16:42:20.553300 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-55ptr" podUID="4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" containerName="registry-server" containerID="cri-o://f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2" gracePeriod=2 Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.000708 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.167274 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-catalog-content\") pod \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\" (UID: \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\") " Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.167391 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-utilities\") pod \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\" (UID: \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\") " Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.167437 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbv9d\" (UniqueName: \"kubernetes.io/projected/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-kube-api-access-vbv9d\") pod \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\" (UID: \"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1\") " Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.168429 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-utilities" (OuterVolumeSpecName: "utilities") pod "4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" (UID: "4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.174143 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-kube-api-access-vbv9d" (OuterVolumeSpecName: "kube-api-access-vbv9d") pod "4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" (UID: "4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1"). InnerVolumeSpecName "kube-api-access-vbv9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.228416 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" (UID: "4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.271346 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.271515 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.271542 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbv9d\" (UniqueName: \"kubernetes.io/projected/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1-kube-api-access-vbv9d\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.566860 4774 generic.go:334] "Generic (PLEG): container finished" podID="4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" containerID="f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2" exitCode=0 Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.566917 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-55ptr" event={"ID":"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1","Type":"ContainerDied","Data":"f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2"} Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.566949 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-55ptr" event={"ID":"4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1","Type":"ContainerDied","Data":"9c1611a214a3553ba1e399fd8a4b6475a342bf3f135352c2054dac5208633f59"} Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.566970 4774 scope.go:117] "RemoveContainer" containerID="f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.567127 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-55ptr" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.588284 4774 scope.go:117] "RemoveContainer" containerID="4fd57a502ff1a2b5e79c0e889741523ab0805b74a473a32d4c2b5a9ba388bee6" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.610908 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-55ptr"] Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.619678 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-55ptr"] Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.636769 4774 scope.go:117] "RemoveContainer" containerID="93dd041e5a58594e85c6dc48469d07668b1b94c0c659093622ceb990a4e9f70d" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.712563 4774 scope.go:117] "RemoveContainer" containerID="f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2" Nov 21 16:42:21 crc kubenswrapper[4774]: E1121 16:42:21.719695 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2\": container with ID starting with f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2 not found: ID does not exist" containerID="f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.719738 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2"} err="failed to get container status \"f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2\": rpc error: code = NotFound desc = could not find container \"f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2\": container with ID starting with f901a462948a146cbf20ea03ff21ddedc494297cda5407812b85d64f4d12b5d2 not found: ID does not exist" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.719771 4774 scope.go:117] "RemoveContainer" containerID="4fd57a502ff1a2b5e79c0e889741523ab0805b74a473a32d4c2b5a9ba388bee6" Nov 21 16:42:21 crc kubenswrapper[4774]: E1121 16:42:21.729156 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fd57a502ff1a2b5e79c0e889741523ab0805b74a473a32d4c2b5a9ba388bee6\": container with ID starting with 4fd57a502ff1a2b5e79c0e889741523ab0805b74a473a32d4c2b5a9ba388bee6 not found: ID does not exist" containerID="4fd57a502ff1a2b5e79c0e889741523ab0805b74a473a32d4c2b5a9ba388bee6" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.729212 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fd57a502ff1a2b5e79c0e889741523ab0805b74a473a32d4c2b5a9ba388bee6"} err="failed to get container status \"4fd57a502ff1a2b5e79c0e889741523ab0805b74a473a32d4c2b5a9ba388bee6\": rpc error: code = NotFound desc = could not find container \"4fd57a502ff1a2b5e79c0e889741523ab0805b74a473a32d4c2b5a9ba388bee6\": container with ID starting with 4fd57a502ff1a2b5e79c0e889741523ab0805b74a473a32d4c2b5a9ba388bee6 not found: ID does not exist" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.729244 4774 scope.go:117] "RemoveContainer" containerID="93dd041e5a58594e85c6dc48469d07668b1b94c0c659093622ceb990a4e9f70d" Nov 21 16:42:21 crc kubenswrapper[4774]: E1121 16:42:21.769168 4774 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"93dd041e5a58594e85c6dc48469d07668b1b94c0c659093622ceb990a4e9f70d\": container with ID starting with 93dd041e5a58594e85c6dc48469d07668b1b94c0c659093622ceb990a4e9f70d not found: ID does not exist" containerID="93dd041e5a58594e85c6dc48469d07668b1b94c0c659093622ceb990a4e9f70d" Nov 21 16:42:21 crc kubenswrapper[4774]: I1121 16:42:21.769223 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93dd041e5a58594e85c6dc48469d07668b1b94c0c659093622ceb990a4e9f70d"} err="failed to get container status \"93dd041e5a58594e85c6dc48469d07668b1b94c0c659093622ceb990a4e9f70d\": rpc error: code = NotFound desc = could not find container \"93dd041e5a58594e85c6dc48469d07668b1b94c0c659093622ceb990a4e9f70d\": container with ID starting with 93dd041e5a58594e85c6dc48469d07668b1b94c0c659093622ceb990a4e9f70d not found: ID does not exist" Nov 21 16:42:22 crc kubenswrapper[4774]: I1121 16:42:22.112237 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" path="/var/lib/kubelet/pods/4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1/volumes" Nov 21 16:42:27 crc kubenswrapper[4774]: I1121 16:42:27.093653 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:42:27 crc kubenswrapper[4774]: E1121 16:42:27.094517 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:42:42 crc kubenswrapper[4774]: I1121 16:42:42.094193 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:42:42 crc kubenswrapper[4774]: E1121 16:42:42.095047 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:42:54 crc kubenswrapper[4774]: I1121 16:42:54.093859 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:42:54 crc kubenswrapper[4774]: E1121 16:42:54.094744 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:42:56 crc kubenswrapper[4774]: I1121 16:42:56.959667 4774 generic.go:334] "Generic (PLEG): container finished" podID="d79df006-a98f-45ed-9ef1-ddbf5e3143c5" containerID="8bb386911c37f84b286ec05da02b55239c3ead476ca32124642a255b2da3e4c0" exitCode=0 Nov 21 16:42:56 crc kubenswrapper[4774]: I1121 16:42:56.959769 4774 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" event={"ID":"d79df006-a98f-45ed-9ef1-ddbf5e3143c5","Type":"ContainerDied","Data":"8bb386911c37f84b286ec05da02b55239c3ead476ca32124642a255b2da3e4c0"} Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.670762 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.782234 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-compute-config-0\") pod \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.782377 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cells-global-config-0\") pod \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.782405 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-ssh-key\") pod \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.782433 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-migration-ssh-key-1\") pod \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.782469 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prpvz\" (UniqueName: \"kubernetes.io/projected/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-kube-api-access-prpvz\") pod \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.782493 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cells-global-config-1\") pod \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.782518 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-migration-ssh-key-0\") pod \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.782654 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-combined-ca-bundle\") pod \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.783006 4774 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-compute-config-1\") pod \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.783064 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-ceph\") pod \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.783139 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-inventory\") pod \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\" (UID: \"d79df006-a98f-45ed-9ef1-ddbf5e3143c5\") " Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.788737 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-kube-api-access-prpvz" (OuterVolumeSpecName: "kube-api-access-prpvz") pod "d79df006-a98f-45ed-9ef1-ddbf5e3143c5" (UID: "d79df006-a98f-45ed-9ef1-ddbf5e3143c5"). InnerVolumeSpecName "kube-api-access-prpvz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.789348 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-ceph" (OuterVolumeSpecName: "ceph") pod "d79df006-a98f-45ed-9ef1-ddbf5e3143c5" (UID: "d79df006-a98f-45ed-9ef1-ddbf5e3143c5"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.791982 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-combined-ca-bundle" (OuterVolumeSpecName: "nova-cell1-combined-ca-bundle") pod "d79df006-a98f-45ed-9ef1-ddbf5e3143c5" (UID: "d79df006-a98f-45ed-9ef1-ddbf5e3143c5"). InnerVolumeSpecName "nova-cell1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.813147 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cells-global-config-1" (OuterVolumeSpecName: "nova-cells-global-config-1") pod "d79df006-a98f-45ed-9ef1-ddbf5e3143c5" (UID: "d79df006-a98f-45ed-9ef1-ddbf5e3143c5"). InnerVolumeSpecName "nova-cells-global-config-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.817695 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d79df006-a98f-45ed-9ef1-ddbf5e3143c5" (UID: "d79df006-a98f-45ed-9ef1-ddbf5e3143c5"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.818954 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "d79df006-a98f-45ed-9ef1-ddbf5e3143c5" (UID: "d79df006-a98f-45ed-9ef1-ddbf5e3143c5"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.820678 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "d79df006-a98f-45ed-9ef1-ddbf5e3143c5" (UID: "d79df006-a98f-45ed-9ef1-ddbf5e3143c5"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.824091 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cells-global-config-0" (OuterVolumeSpecName: "nova-cells-global-config-0") pod "d79df006-a98f-45ed-9ef1-ddbf5e3143c5" (UID: "d79df006-a98f-45ed-9ef1-ddbf5e3143c5"). InnerVolumeSpecName "nova-cells-global-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.828179 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-inventory" (OuterVolumeSpecName: "inventory") pod "d79df006-a98f-45ed-9ef1-ddbf5e3143c5" (UID: "d79df006-a98f-45ed-9ef1-ddbf5e3143c5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.829448 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "d79df006-a98f-45ed-9ef1-ddbf5e3143c5" (UID: "d79df006-a98f-45ed-9ef1-ddbf5e3143c5"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.830258 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "d79df006-a98f-45ed-9ef1-ddbf5e3143c5" (UID: "d79df006-a98f-45ed-9ef1-ddbf5e3143c5"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.887010 4774 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.887052 4774 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-0\" (UniqueName: \"kubernetes.io/configmap/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cells-global-config-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.887065 4774 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-ssh-key\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.887078 4774 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.887090 4774 reconciler_common.go:293] "Volume detached for volume \"nova-cells-global-config-1\" (UniqueName: \"kubernetes.io/configmap/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cells-global-config-1\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.887104 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prpvz\" (UniqueName: \"kubernetes.io/projected/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-kube-api-access-prpvz\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.887116 4774 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.887128 4774 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.887140 4774 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.887154 4774 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-ceph\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.887166 4774 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d79df006-a98f-45ed-9ef1-ddbf5e3143c5-inventory\") on node \"crc\" DevicePath \"\"" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.989720 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" event={"ID":"d79df006-a98f-45ed-9ef1-ddbf5e3143c5","Type":"ContainerDied","Data":"11a0268d070af2fac8a2dd23b257888c6c3cbd8596045b2cf0f0c723c6da6e90"} Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.989771 4774 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11a0268d070af2fac8a2dd23b257888c6c3cbd8596045b2cf0f0c723c6da6e90" Nov 21 16:42:58 crc kubenswrapper[4774]: I1121 16:42:58.989789 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l" Nov 21 16:43:05 crc kubenswrapper[4774]: I1121 16:43:05.093517 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:43:05 crc kubenswrapper[4774]: E1121 16:43:05.094270 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:43:16 crc kubenswrapper[4774]: I1121 16:43:16.093595 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:43:16 crc kubenswrapper[4774]: E1121 16:43:16.095348 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:43:29 crc kubenswrapper[4774]: I1121 16:43:29.093752 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:43:29 crc kubenswrapper[4774]: E1121 16:43:29.094889 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:43:43 crc kubenswrapper[4774]: I1121 16:43:43.093702 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:43:43 crc kubenswrapper[4774]: E1121 16:43:43.094534 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.557699 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cpfgx"] Nov 21 16:43:51 crc kubenswrapper[4774]: E1121 16:43:51.558713 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" containerName="extract-utilities" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.558728 4774 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" containerName="extract-utilities" Nov 21 16:43:51 crc kubenswrapper[4774]: E1121 16:43:51.558744 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" containerName="registry-server" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.558751 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" containerName="registry-server" Nov 21 16:43:51 crc kubenswrapper[4774]: E1121 16:43:51.558759 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d79df006-a98f-45ed-9ef1-ddbf5e3143c5" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.558767 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="d79df006-a98f-45ed-9ef1-ddbf5e3143c5" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Nov 21 16:43:51 crc kubenswrapper[4774]: E1121 16:43:51.558834 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" containerName="extract-content" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.558841 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" containerName="extract-content" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.559052 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="d79df006-a98f-45ed-9ef1-ddbf5e3143c5" containerName="nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.559081 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d3a5e10-5edd-4d5e-b6d0-76ea8f3163c1" containerName="registry-server" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.560806 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.585618 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cpfgx"] Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.738707 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22db57a9-c20d-4cd9-8dbc-654192379628-utilities\") pod \"redhat-operators-cpfgx\" (UID: \"22db57a9-c20d-4cd9-8dbc-654192379628\") " pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.738796 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64hl9\" (UniqueName: \"kubernetes.io/projected/22db57a9-c20d-4cd9-8dbc-654192379628-kube-api-access-64hl9\") pod \"redhat-operators-cpfgx\" (UID: \"22db57a9-c20d-4cd9-8dbc-654192379628\") " pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.739253 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22db57a9-c20d-4cd9-8dbc-654192379628-catalog-content\") pod \"redhat-operators-cpfgx\" (UID: \"22db57a9-c20d-4cd9-8dbc-654192379628\") " pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.841953 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64hl9\" (UniqueName: \"kubernetes.io/projected/22db57a9-c20d-4cd9-8dbc-654192379628-kube-api-access-64hl9\") pod \"redhat-operators-cpfgx\" (UID: \"22db57a9-c20d-4cd9-8dbc-654192379628\") " pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.842048 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22db57a9-c20d-4cd9-8dbc-654192379628-catalog-content\") pod \"redhat-operators-cpfgx\" (UID: \"22db57a9-c20d-4cd9-8dbc-654192379628\") " pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.842180 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22db57a9-c20d-4cd9-8dbc-654192379628-utilities\") pod \"redhat-operators-cpfgx\" (UID: \"22db57a9-c20d-4cd9-8dbc-654192379628\") " pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.842600 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22db57a9-c20d-4cd9-8dbc-654192379628-utilities\") pod \"redhat-operators-cpfgx\" (UID: \"22db57a9-c20d-4cd9-8dbc-654192379628\") " pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.842603 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22db57a9-c20d-4cd9-8dbc-654192379628-catalog-content\") pod \"redhat-operators-cpfgx\" (UID: \"22db57a9-c20d-4cd9-8dbc-654192379628\") " pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.859719 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-64hl9\" (UniqueName: \"kubernetes.io/projected/22db57a9-c20d-4cd9-8dbc-654192379628-kube-api-access-64hl9\") pod \"redhat-operators-cpfgx\" (UID: \"22db57a9-c20d-4cd9-8dbc-654192379628\") " pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:43:51 crc kubenswrapper[4774]: I1121 16:43:51.887797 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:43:52 crc kubenswrapper[4774]: I1121 16:43:52.327055 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cpfgx"] Nov 21 16:43:52 crc kubenswrapper[4774]: W1121 16:43:52.329758 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22db57a9_c20d_4cd9_8dbc_654192379628.slice/crio-1b784f24f76eb2b264c112830b05586f484848711adf66da9e3c08c04e2ba670 WatchSource:0}: Error finding container 1b784f24f76eb2b264c112830b05586f484848711adf66da9e3c08c04e2ba670: Status 404 returned error can't find the container with id 1b784f24f76eb2b264c112830b05586f484848711adf66da9e3c08c04e2ba670 Nov 21 16:43:52 crc kubenswrapper[4774]: I1121 16:43:52.576861 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cpfgx" event={"ID":"22db57a9-c20d-4cd9-8dbc-654192379628","Type":"ContainerStarted","Data":"70927686a621f968c183b468f944af0cbf5b25f12a1488eb46b67d7fadfc9e44"} Nov 21 16:43:52 crc kubenswrapper[4774]: I1121 16:43:52.577173 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cpfgx" event={"ID":"22db57a9-c20d-4cd9-8dbc-654192379628","Type":"ContainerStarted","Data":"1b784f24f76eb2b264c112830b05586f484848711adf66da9e3c08c04e2ba670"} Nov 21 16:43:53 crc kubenswrapper[4774]: I1121 16:43:53.590049 4774 generic.go:334] "Generic (PLEG): container finished" podID="22db57a9-c20d-4cd9-8dbc-654192379628" containerID="70927686a621f968c183b468f944af0cbf5b25f12a1488eb46b67d7fadfc9e44" exitCode=0 Nov 21 16:43:53 crc kubenswrapper[4774]: I1121 16:43:53.590126 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cpfgx" event={"ID":"22db57a9-c20d-4cd9-8dbc-654192379628","Type":"ContainerDied","Data":"70927686a621f968c183b468f944af0cbf5b25f12a1488eb46b67d7fadfc9e44"} Nov 21 16:43:54 crc kubenswrapper[4774]: I1121 16:43:54.602282 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cpfgx" event={"ID":"22db57a9-c20d-4cd9-8dbc-654192379628","Type":"ContainerStarted","Data":"4af1db98086451bc97c9c4fdc6af55063f9231594a373afd710e5d597601ce41"} Nov 21 16:43:57 crc kubenswrapper[4774]: I1121 16:43:57.093020 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:43:57 crc kubenswrapper[4774]: E1121 16:43:57.093852 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:43:58 crc kubenswrapper[4774]: I1121 16:43:58.649807 4774 generic.go:334] "Generic (PLEG): container finished" podID="22db57a9-c20d-4cd9-8dbc-654192379628" 
containerID="4af1db98086451bc97c9c4fdc6af55063f9231594a373afd710e5d597601ce41" exitCode=0 Nov 21 16:43:58 crc kubenswrapper[4774]: I1121 16:43:58.649925 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cpfgx" event={"ID":"22db57a9-c20d-4cd9-8dbc-654192379628","Type":"ContainerDied","Data":"4af1db98086451bc97c9c4fdc6af55063f9231594a373afd710e5d597601ce41"} Nov 21 16:43:59 crc kubenswrapper[4774]: I1121 16:43:59.663737 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cpfgx" event={"ID":"22db57a9-c20d-4cd9-8dbc-654192379628","Type":"ContainerStarted","Data":"c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4"} Nov 21 16:43:59 crc kubenswrapper[4774]: I1121 16:43:59.692350 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cpfgx" podStartSLOduration=3.231528168 podStartE2EDuration="8.692321497s" podCreationTimestamp="2025-11-21 16:43:51 +0000 UTC" firstStartedPulling="2025-11-21 16:43:53.592644925 +0000 UTC m=+9624.244844174" lastFinishedPulling="2025-11-21 16:43:59.053438244 +0000 UTC m=+9629.705637503" observedRunningTime="2025-11-21 16:43:59.681634561 +0000 UTC m=+9630.333833830" watchObservedRunningTime="2025-11-21 16:43:59.692321497 +0000 UTC m=+9630.344520786" Nov 21 16:44:01 crc kubenswrapper[4774]: I1121 16:44:01.889303 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:44:01 crc kubenswrapper[4774]: I1121 16:44:01.889637 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:44:02 crc kubenswrapper[4774]: I1121 16:44:02.936946 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cpfgx" podUID="22db57a9-c20d-4cd9-8dbc-654192379628" containerName="registry-server" probeResult="failure" output=< Nov 21 16:44:02 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 16:44:02 crc kubenswrapper[4774]: > Nov 21 16:44:11 crc kubenswrapper[4774]: I1121 16:44:11.946192 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:44:12 crc kubenswrapper[4774]: I1121 16:44:12.006909 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:44:12 crc kubenswrapper[4774]: I1121 16:44:12.096646 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:44:12 crc kubenswrapper[4774]: E1121 16:44:12.096919 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:44:12 crc kubenswrapper[4774]: I1121 16:44:12.189930 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cpfgx"] Nov 21 16:44:13 crc kubenswrapper[4774]: I1121 16:44:13.206703 4774 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-operators-cpfgx" podUID="22db57a9-c20d-4cd9-8dbc-654192379628" containerName="registry-server" containerID="cri-o://c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4" gracePeriod=2 Nov 21 16:44:13 crc kubenswrapper[4774]: I1121 16:44:13.696409 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:44:13 crc kubenswrapper[4774]: I1121 16:44:13.761075 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22db57a9-c20d-4cd9-8dbc-654192379628-utilities\") pod \"22db57a9-c20d-4cd9-8dbc-654192379628\" (UID: \"22db57a9-c20d-4cd9-8dbc-654192379628\") " Nov 21 16:44:13 crc kubenswrapper[4774]: I1121 16:44:13.761160 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22db57a9-c20d-4cd9-8dbc-654192379628-catalog-content\") pod \"22db57a9-c20d-4cd9-8dbc-654192379628\" (UID: \"22db57a9-c20d-4cd9-8dbc-654192379628\") " Nov 21 16:44:13 crc kubenswrapper[4774]: I1121 16:44:13.761278 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64hl9\" (UniqueName: \"kubernetes.io/projected/22db57a9-c20d-4cd9-8dbc-654192379628-kube-api-access-64hl9\") pod \"22db57a9-c20d-4cd9-8dbc-654192379628\" (UID: \"22db57a9-c20d-4cd9-8dbc-654192379628\") " Nov 21 16:44:13 crc kubenswrapper[4774]: I1121 16:44:13.761853 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22db57a9-c20d-4cd9-8dbc-654192379628-utilities" (OuterVolumeSpecName: "utilities") pod "22db57a9-c20d-4cd9-8dbc-654192379628" (UID: "22db57a9-c20d-4cd9-8dbc-654192379628"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:44:13 crc kubenswrapper[4774]: I1121 16:44:13.762211 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22db57a9-c20d-4cd9-8dbc-654192379628-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:44:13 crc kubenswrapper[4774]: I1121 16:44:13.766329 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22db57a9-c20d-4cd9-8dbc-654192379628-kube-api-access-64hl9" (OuterVolumeSpecName: "kube-api-access-64hl9") pod "22db57a9-c20d-4cd9-8dbc-654192379628" (UID: "22db57a9-c20d-4cd9-8dbc-654192379628"). InnerVolumeSpecName "kube-api-access-64hl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:44:13 crc kubenswrapper[4774]: I1121 16:44:13.856958 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22db57a9-c20d-4cd9-8dbc-654192379628-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "22db57a9-c20d-4cd9-8dbc-654192379628" (UID: "22db57a9-c20d-4cd9-8dbc-654192379628"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:44:13 crc kubenswrapper[4774]: I1121 16:44:13.865019 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64hl9\" (UniqueName: \"kubernetes.io/projected/22db57a9-c20d-4cd9-8dbc-654192379628-kube-api-access-64hl9\") on node \"crc\" DevicePath \"\"" Nov 21 16:44:13 crc kubenswrapper[4774]: I1121 16:44:13.865069 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22db57a9-c20d-4cd9-8dbc-654192379628-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.219314 4774 generic.go:334] "Generic (PLEG): container finished" podID="22db57a9-c20d-4cd9-8dbc-654192379628" containerID="c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4" exitCode=0 Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.219370 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cpfgx" event={"ID":"22db57a9-c20d-4cd9-8dbc-654192379628","Type":"ContainerDied","Data":"c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4"} Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.219407 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cpfgx" event={"ID":"22db57a9-c20d-4cd9-8dbc-654192379628","Type":"ContainerDied","Data":"1b784f24f76eb2b264c112830b05586f484848711adf66da9e3c08c04e2ba670"} Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.219432 4774 scope.go:117] "RemoveContainer" containerID="c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4" Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.219623 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cpfgx" Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.256029 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cpfgx"] Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.264547 4774 scope.go:117] "RemoveContainer" containerID="4af1db98086451bc97c9c4fdc6af55063f9231594a373afd710e5d597601ce41" Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.264661 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cpfgx"] Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.284662 4774 scope.go:117] "RemoveContainer" containerID="70927686a621f968c183b468f944af0cbf5b25f12a1488eb46b67d7fadfc9e44" Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.341603 4774 scope.go:117] "RemoveContainer" containerID="c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4" Nov 21 16:44:14 crc kubenswrapper[4774]: E1121 16:44:14.342109 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4\": container with ID starting with c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4 not found: ID does not exist" containerID="c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4" Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.342161 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4"} err="failed to get container status \"c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4\": rpc error: code = NotFound desc = could not find container \"c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4\": container with ID starting with c4df5b98f4a9d195e196ec98a0283a7f514cc247f7fcf3baf90a904f2717f1c4 not found: ID does not exist" Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.342193 4774 scope.go:117] "RemoveContainer" containerID="4af1db98086451bc97c9c4fdc6af55063f9231594a373afd710e5d597601ce41" Nov 21 16:44:14 crc kubenswrapper[4774]: E1121 16:44:14.342686 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4af1db98086451bc97c9c4fdc6af55063f9231594a373afd710e5d597601ce41\": container with ID starting with 4af1db98086451bc97c9c4fdc6af55063f9231594a373afd710e5d597601ce41 not found: ID does not exist" containerID="4af1db98086451bc97c9c4fdc6af55063f9231594a373afd710e5d597601ce41" Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.342753 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4af1db98086451bc97c9c4fdc6af55063f9231594a373afd710e5d597601ce41"} err="failed to get container status \"4af1db98086451bc97c9c4fdc6af55063f9231594a373afd710e5d597601ce41\": rpc error: code = NotFound desc = could not find container \"4af1db98086451bc97c9c4fdc6af55063f9231594a373afd710e5d597601ce41\": container with ID starting with 4af1db98086451bc97c9c4fdc6af55063f9231594a373afd710e5d597601ce41 not found: ID does not exist" Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.342783 4774 scope.go:117] "RemoveContainer" containerID="70927686a621f968c183b468f944af0cbf5b25f12a1488eb46b67d7fadfc9e44" Nov 21 16:44:14 crc kubenswrapper[4774]: E1121 16:44:14.343196 4774 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"70927686a621f968c183b468f944af0cbf5b25f12a1488eb46b67d7fadfc9e44\": container with ID starting with 70927686a621f968c183b468f944af0cbf5b25f12a1488eb46b67d7fadfc9e44 not found: ID does not exist" containerID="70927686a621f968c183b468f944af0cbf5b25f12a1488eb46b67d7fadfc9e44" Nov 21 16:44:14 crc kubenswrapper[4774]: I1121 16:44:14.343243 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70927686a621f968c183b468f944af0cbf5b25f12a1488eb46b67d7fadfc9e44"} err="failed to get container status \"70927686a621f968c183b468f944af0cbf5b25f12a1488eb46b67d7fadfc9e44\": rpc error: code = NotFound desc = could not find container \"70927686a621f968c183b468f944af0cbf5b25f12a1488eb46b67d7fadfc9e44\": container with ID starting with 70927686a621f968c183b468f944af0cbf5b25f12a1488eb46b67d7fadfc9e44 not found: ID does not exist" Nov 21 16:44:16 crc kubenswrapper[4774]: I1121 16:44:16.107049 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22db57a9-c20d-4cd9-8dbc-654192379628" path="/var/lib/kubelet/pods/22db57a9-c20d-4cd9-8dbc-654192379628/volumes" Nov 21 16:44:25 crc kubenswrapper[4774]: I1121 16:44:25.094089 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:44:25 crc kubenswrapper[4774]: E1121 16:44:25.095351 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:44:37 crc kubenswrapper[4774]: I1121 16:44:37.093542 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28" Nov 21 16:44:37 crc kubenswrapper[4774]: I1121 16:44:37.454586 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"863a82dc07f537f948868ebedea5b04fff97cceb2f10dbb0bd356fdc76e8ae25"} Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.175993 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw"] Nov 21 16:45:00 crc kubenswrapper[4774]: E1121 16:45:00.176868 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22db57a9-c20d-4cd9-8dbc-654192379628" containerName="extract-utilities" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.176881 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="22db57a9-c20d-4cd9-8dbc-654192379628" containerName="extract-utilities" Nov 21 16:45:00 crc kubenswrapper[4774]: E1121 16:45:00.176894 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22db57a9-c20d-4cd9-8dbc-654192379628" containerName="extract-content" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.176901 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="22db57a9-c20d-4cd9-8dbc-654192379628" containerName="extract-content" Nov 21 16:45:00 crc kubenswrapper[4774]: E1121 16:45:00.176913 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22db57a9-c20d-4cd9-8dbc-654192379628" 
containerName="registry-server" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.176920 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="22db57a9-c20d-4cd9-8dbc-654192379628" containerName="registry-server" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.177122 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="22db57a9-c20d-4cd9-8dbc-654192379628" containerName="registry-server" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.177946 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.180102 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.181896 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.185777 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw"] Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.313451 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qwrx\" (UniqueName: \"kubernetes.io/projected/28129145-49ad-438d-a26f-e8afa14d99c9-kube-api-access-5qwrx\") pod \"collect-profiles-29395725-s4xnw\" (UID: \"28129145-49ad-438d-a26f-e8afa14d99c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.313531 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/28129145-49ad-438d-a26f-e8afa14d99c9-config-volume\") pod \"collect-profiles-29395725-s4xnw\" (UID: \"28129145-49ad-438d-a26f-e8afa14d99c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.313901 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/28129145-49ad-438d-a26f-e8afa14d99c9-secret-volume\") pod \"collect-profiles-29395725-s4xnw\" (UID: \"28129145-49ad-438d-a26f-e8afa14d99c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.416174 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qwrx\" (UniqueName: \"kubernetes.io/projected/28129145-49ad-438d-a26f-e8afa14d99c9-kube-api-access-5qwrx\") pod \"collect-profiles-29395725-s4xnw\" (UID: \"28129145-49ad-438d-a26f-e8afa14d99c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.416246 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/28129145-49ad-438d-a26f-e8afa14d99c9-config-volume\") pod \"collect-profiles-29395725-s4xnw\" (UID: \"28129145-49ad-438d-a26f-e8afa14d99c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.416320 4774 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/28129145-49ad-438d-a26f-e8afa14d99c9-secret-volume\") pod \"collect-profiles-29395725-s4xnw\" (UID: \"28129145-49ad-438d-a26f-e8afa14d99c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.417136 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/28129145-49ad-438d-a26f-e8afa14d99c9-config-volume\") pod \"collect-profiles-29395725-s4xnw\" (UID: \"28129145-49ad-438d-a26f-e8afa14d99c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.423521 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/28129145-49ad-438d-a26f-e8afa14d99c9-secret-volume\") pod \"collect-profiles-29395725-s4xnw\" (UID: \"28129145-49ad-438d-a26f-e8afa14d99c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.442921 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qwrx\" (UniqueName: \"kubernetes.io/projected/28129145-49ad-438d-a26f-e8afa14d99c9-kube-api-access-5qwrx\") pod \"collect-profiles-29395725-s4xnw\" (UID: \"28129145-49ad-438d-a26f-e8afa14d99c9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.511319 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:00 crc kubenswrapper[4774]: I1121 16:45:00.985317 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw"] Nov 21 16:45:00 crc kubenswrapper[4774]: W1121 16:45:00.990055 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28129145_49ad_438d_a26f_e8afa14d99c9.slice/crio-5e3731de8fcdf5561374c1e953f12316728b27288af1f0082d9663944c8a7c00 WatchSource:0}: Error finding container 5e3731de8fcdf5561374c1e953f12316728b27288af1f0082d9663944c8a7c00: Status 404 returned error can't find the container with id 5e3731de8fcdf5561374c1e953f12316728b27288af1f0082d9663944c8a7c00 Nov 21 16:45:01 crc kubenswrapper[4774]: I1121 16:45:01.723502 4774 generic.go:334] "Generic (PLEG): container finished" podID="28129145-49ad-438d-a26f-e8afa14d99c9" containerID="2243c744315a42d0dc5560c92e1c38c753fcef8ea16eb024ee007c34a46bc131" exitCode=0 Nov 21 16:45:01 crc kubenswrapper[4774]: I1121 16:45:01.723570 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" event={"ID":"28129145-49ad-438d-a26f-e8afa14d99c9","Type":"ContainerDied","Data":"2243c744315a42d0dc5560c92e1c38c753fcef8ea16eb024ee007c34a46bc131"} Nov 21 16:45:01 crc kubenswrapper[4774]: I1121 16:45:01.723869 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" event={"ID":"28129145-49ad-438d-a26f-e8afa14d99c9","Type":"ContainerStarted","Data":"5e3731de8fcdf5561374c1e953f12316728b27288af1f0082d9663944c8a7c00"} Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.133611 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.183769 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/28129145-49ad-438d-a26f-e8afa14d99c9-config-volume\") pod \"28129145-49ad-438d-a26f-e8afa14d99c9\" (UID: \"28129145-49ad-438d-a26f-e8afa14d99c9\") " Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.184324 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qwrx\" (UniqueName: \"kubernetes.io/projected/28129145-49ad-438d-a26f-e8afa14d99c9-kube-api-access-5qwrx\") pod \"28129145-49ad-438d-a26f-e8afa14d99c9\" (UID: \"28129145-49ad-438d-a26f-e8afa14d99c9\") " Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.184376 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/28129145-49ad-438d-a26f-e8afa14d99c9-secret-volume\") pod \"28129145-49ad-438d-a26f-e8afa14d99c9\" (UID: \"28129145-49ad-438d-a26f-e8afa14d99c9\") " Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.184521 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28129145-49ad-438d-a26f-e8afa14d99c9-config-volume" (OuterVolumeSpecName: "config-volume") pod "28129145-49ad-438d-a26f-e8afa14d99c9" (UID: "28129145-49ad-438d-a26f-e8afa14d99c9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.185435 4774 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/28129145-49ad-438d-a26f-e8afa14d99c9-config-volume\") on node \"crc\" DevicePath \"\"" Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.190787 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28129145-49ad-438d-a26f-e8afa14d99c9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "28129145-49ad-438d-a26f-e8afa14d99c9" (UID: "28129145-49ad-438d-a26f-e8afa14d99c9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.192719 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28129145-49ad-438d-a26f-e8afa14d99c9-kube-api-access-5qwrx" (OuterVolumeSpecName: "kube-api-access-5qwrx") pod "28129145-49ad-438d-a26f-e8afa14d99c9" (UID: "28129145-49ad-438d-a26f-e8afa14d99c9"). InnerVolumeSpecName "kube-api-access-5qwrx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.287104 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qwrx\" (UniqueName: \"kubernetes.io/projected/28129145-49ad-438d-a26f-e8afa14d99c9-kube-api-access-5qwrx\") on node \"crc\" DevicePath \"\"" Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.287674 4774 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/28129145-49ad-438d-a26f-e8afa14d99c9-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.742588 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" event={"ID":"28129145-49ad-438d-a26f-e8afa14d99c9","Type":"ContainerDied","Data":"5e3731de8fcdf5561374c1e953f12316728b27288af1f0082d9663944c8a7c00"} Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.742843 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e3731de8fcdf5561374c1e953f12316728b27288af1f0082d9663944c8a7c00" Nov 21 16:45:03 crc kubenswrapper[4774]: I1121 16:45:03.742701 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29395725-s4xnw" Nov 21 16:45:04 crc kubenswrapper[4774]: I1121 16:45:04.229659 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv"] Nov 21 16:45:04 crc kubenswrapper[4774]: I1121 16:45:04.241641 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29395680-nhsbv"] Nov 21 16:45:05 crc kubenswrapper[4774]: I1121 16:45:05.861604 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"] Nov 21 16:45:05 crc kubenswrapper[4774]: I1121 16:45:05.862540 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/mariadb-copy-data" podUID="053368cd-72d7-4402-a577-40330d37399d" containerName="adoption" containerID="cri-o://cb0c6b7c21b867b1a4af5ae43ca4a80efe310250cb8187841bb70d0ee3046fd1" gracePeriod=30 Nov 21 16:45:06 crc kubenswrapper[4774]: I1121 16:45:06.109495 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afc30023-280b-430c-a86d-3655a938905e" path="/var/lib/kubelet/pods/afc30023-280b-430c-a86d-3655a938905e/volumes" Nov 21 16:45:36 crc kubenswrapper[4774]: I1121 16:45:36.118743 4774 generic.go:334] "Generic (PLEG): container finished" podID="053368cd-72d7-4402-a577-40330d37399d" containerID="cb0c6b7c21b867b1a4af5ae43ca4a80efe310250cb8187841bb70d0ee3046fd1" exitCode=137 Nov 21 16:45:36 crc kubenswrapper[4774]: I1121 16:45:36.118997 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"053368cd-72d7-4402-a577-40330d37399d","Type":"ContainerDied","Data":"cb0c6b7c21b867b1a4af5ae43ca4a80efe310250cb8187841bb70d0ee3046fd1"} Nov 21 16:45:36 crc kubenswrapper[4774]: I1121 16:45:36.325809 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/mariadb-copy-data" Nov 21 16:45:36 crc kubenswrapper[4774]: I1121 16:45:36.469319 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mariadb-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\") pod \"053368cd-72d7-4402-a577-40330d37399d\" (UID: \"053368cd-72d7-4402-a577-40330d37399d\") " Nov 21 16:45:36 crc kubenswrapper[4774]: I1121 16:45:36.469547 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwr4w\" (UniqueName: \"kubernetes.io/projected/053368cd-72d7-4402-a577-40330d37399d-kube-api-access-fwr4w\") pod \"053368cd-72d7-4402-a577-40330d37399d\" (UID: \"053368cd-72d7-4402-a577-40330d37399d\") " Nov 21 16:45:36 crc kubenswrapper[4774]: I1121 16:45:36.478113 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/053368cd-72d7-4402-a577-40330d37399d-kube-api-access-fwr4w" (OuterVolumeSpecName: "kube-api-access-fwr4w") pod "053368cd-72d7-4402-a577-40330d37399d" (UID: "053368cd-72d7-4402-a577-40330d37399d"). InnerVolumeSpecName "kube-api-access-fwr4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:45:36 crc kubenswrapper[4774]: I1121 16:45:36.486660 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50" (OuterVolumeSpecName: "mariadb-data") pod "053368cd-72d7-4402-a577-40330d37399d" (UID: "053368cd-72d7-4402-a577-40330d37399d"). InnerVolumeSpecName "pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 21 16:45:36 crc kubenswrapper[4774]: I1121 16:45:36.572230 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\") on node \"crc\" " Nov 21 16:45:36 crc kubenswrapper[4774]: I1121 16:45:36.572901 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwr4w\" (UniqueName: \"kubernetes.io/projected/053368cd-72d7-4402-a577-40330d37399d-kube-api-access-fwr4w\") on node \"crc\" DevicePath \"\"" Nov 21 16:45:36 crc kubenswrapper[4774]: I1121 16:45:36.607873 4774 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Nov 21 16:45:36 crc kubenswrapper[4774]: I1121 16:45:36.608081 4774 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50") on node "crc"
Nov 21 16:45:36 crc kubenswrapper[4774]: I1121 16:45:36.674690 4774 reconciler_common.go:293] "Volume detached for volume \"pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-00dd7a75-1b4b-4146-8f6b-71831c34ba50\") on node \"crc\" DevicePath \"\""
Nov 21 16:45:37 crc kubenswrapper[4774]: I1121 16:45:37.131148 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/mariadb-copy-data" event={"ID":"053368cd-72d7-4402-a577-40330d37399d","Type":"ContainerDied","Data":"8accd161e2705082d2ca1c1fb075c960b5242a713064ff7cdef4ac467ff122e6"}
Nov 21 16:45:37 crc kubenswrapper[4774]: I1121 16:45:37.131225 4774 scope.go:117] "RemoveContainer" containerID="cb0c6b7c21b867b1a4af5ae43ca4a80efe310250cb8187841bb70d0ee3046fd1"
Nov 21 16:45:37 crc kubenswrapper[4774]: I1121 16:45:37.131230 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/mariadb-copy-data"
Nov 21 16:45:37 crc kubenswrapper[4774]: I1121 16:45:37.185123 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/mariadb-copy-data"]
Nov 21 16:45:37 crc kubenswrapper[4774]: I1121 16:45:37.201856 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/mariadb-copy-data"]
Nov 21 16:45:37 crc kubenswrapper[4774]: I1121 16:45:37.824907 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"]
Nov 21 16:45:37 crc kubenswrapper[4774]: I1121 16:45:37.825122 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ovn-copy-data" podUID="c0268166-20f0-4778-976c-58373109d561" containerName="adoption" containerID="cri-o://67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34" gracePeriod=30
Nov 21 16:45:38 crc kubenswrapper[4774]: I1121 16:45:38.105454 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="053368cd-72d7-4402-a577-40330d37399d" path="/var/lib/kubelet/pods/053368cd-72d7-4402-a577-40330d37399d/volumes"
Nov 21 16:45:54 crc kubenswrapper[4774]: I1121 16:45:54.548630 4774 scope.go:117] "RemoveContainer" containerID="350ead7a4de4dfa93aa2dcd8c09bf42b4e56b2a310b2b9f48cc266d209c15b3b"
Nov 21 16:46:08 crc kubenswrapper[4774]: I1121 16:46:08.339562 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Nov 21 16:46:08 crc kubenswrapper[4774]: I1121 16:46:08.474536 4774 generic.go:334] "Generic (PLEG): container finished" podID="c0268166-20f0-4778-976c-58373109d561" containerID="67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34" exitCode=137
Nov 21 16:46:08 crc kubenswrapper[4774]: I1121 16:46:08.474992 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"c0268166-20f0-4778-976c-58373109d561","Type":"ContainerDied","Data":"67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34"}
Nov 21 16:46:08 crc kubenswrapper[4774]: I1121 16:46:08.475035 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-copy-data" event={"ID":"c0268166-20f0-4778-976c-58373109d561","Type":"ContainerDied","Data":"9e87706406e065df33a5517807cf42030e8f357753d1cd191ad8636076191431"}
Nov 21 16:46:08 crc kubenswrapper[4774]: I1121 16:46:08.475058 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-copy-data"
Nov 21 16:46:08 crc kubenswrapper[4774]: I1121 16:46:08.475065 4774 scope.go:117] "RemoveContainer" containerID="67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34"
Nov 21 16:46:08 crc kubenswrapper[4774]: I1121 16:46:08.505705 4774 scope.go:117] "RemoveContainer" containerID="67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34"
Nov 21 16:46:08 crc kubenswrapper[4774]: E1121 16:46:08.506173 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34\": container with ID starting with 67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34 not found: ID does not exist" containerID="67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34"
Nov 21 16:46:08 crc kubenswrapper[4774]: I1121 16:46:08.506213 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34"} err="failed to get container status \"67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34\": rpc error: code = NotFound desc = could not find container \"67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34\": container with ID starting with 67a5481fc092c431200edc77c9e1b83f79c3f46c579494624b21cd958dd2cf34 not found: ID does not exist"
Nov 21 16:46:08 crc kubenswrapper[4774]: I1121 16:46:08.522035 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\") pod \"c0268166-20f0-4778-976c-58373109d561\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") "
Nov 21 16:46:08 crc kubenswrapper[4774]: I1121 16:46:08.522115 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hsjwj\" (UniqueName: \"kubernetes.io/projected/c0268166-20f0-4778-976c-58373109d561-kube-api-access-hsjwj\") pod \"c0268166-20f0-4778-976c-58373109d561\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") "
Nov 21 16:46:08 crc kubenswrapper[4774]: I1121 16:46:08.522334 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/c0268166-20f0-4778-976c-58373109d561-ovn-data-cert\") pod \"c0268166-20f0-4778-976c-58373109d561\" (UID: \"c0268166-20f0-4778-976c-58373109d561\") "
Nov 21 16:46:09 crc kubenswrapper[4774]: I1121 16:46:09.212065 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0268166-20f0-4778-976c-58373109d561-kube-api-access-hsjwj" (OuterVolumeSpecName: "kube-api-access-hsjwj") pod "c0268166-20f0-4778-976c-58373109d561" (UID: "c0268166-20f0-4778-976c-58373109d561"). InnerVolumeSpecName "kube-api-access-hsjwj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 16:46:09 crc kubenswrapper[4774]: I1121 16:46:09.213130 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c0268166-20f0-4778-976c-58373109d561-ovn-data-cert" (OuterVolumeSpecName: "ovn-data-cert") pod "c0268166-20f0-4778-976c-58373109d561" (UID: "c0268166-20f0-4778-976c-58373109d561"). InnerVolumeSpecName "ovn-data-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 21 16:46:09 crc kubenswrapper[4774]: I1121 16:46:09.243274 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b" (OuterVolumeSpecName: "ovn-data") pod "c0268166-20f0-4778-976c-58373109d561" (UID: "c0268166-20f0-4778-976c-58373109d561"). InnerVolumeSpecName "pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b". PluginName "kubernetes.io/csi", VolumeGidValue ""
Nov 21 16:46:09 crc kubenswrapper[4774]: I1121 16:46:09.243522 4774 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\") on node \"crc\" "
Nov 21 16:46:09 crc kubenswrapper[4774]: I1121 16:46:09.244610 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hsjwj\" (UniqueName: \"kubernetes.io/projected/c0268166-20f0-4778-976c-58373109d561-kube-api-access-hsjwj\") on node \"crc\" DevicePath \"\""
Nov 21 16:46:09 crc kubenswrapper[4774]: I1121 16:46:09.244875 4774 reconciler_common.go:293] "Volume detached for volume \"ovn-data-cert\" (UniqueName: \"kubernetes.io/secret/c0268166-20f0-4778-976c-58373109d561-ovn-data-cert\") on node \"crc\" DevicePath \"\""
Nov 21 16:46:09 crc kubenswrapper[4774]: I1121 16:46:09.316682 4774 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Nov 21 16:46:09 crc kubenswrapper[4774]: I1121 16:46:09.316872 4774 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b") on node "crc"
Nov 21 16:46:09 crc kubenswrapper[4774]: I1121 16:46:09.346327 4774 reconciler_common.go:293] "Volume detached for volume \"pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b7541f98-fef0-4c85-95cc-c95456ba5b3b\") on node \"crc\" DevicePath \"\""
Nov 21 16:46:09 crc kubenswrapper[4774]: I1121 16:46:09.411602 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-copy-data"]
Nov 21 16:46:09 crc kubenswrapper[4774]: I1121 16:46:09.423167 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-copy-data"]
Nov 21 16:46:10 crc kubenswrapper[4774]: I1121 16:46:10.108738 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0268166-20f0-4778-976c-58373109d561" path="/var/lib/kubelet/pods/c0268166-20f0-4778-976c-58373109d561/volumes"
Nov 21 16:46:26 crc kubenswrapper[4774]: I1121 16:46:26.785610 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5x7xf"]
Nov 21 16:46:26 crc kubenswrapper[4774]: E1121 16:46:26.786738 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="053368cd-72d7-4402-a577-40330d37399d" containerName="adoption"
Nov 21 16:46:26 crc kubenswrapper[4774]: I1121 16:46:26.786757 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="053368cd-72d7-4402-a577-40330d37399d" containerName="adoption"
Nov 21 16:46:26 crc kubenswrapper[4774]: E1121 16:46:26.786801 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28129145-49ad-438d-a26f-e8afa14d99c9" containerName="collect-profiles"
Nov 21 16:46:26 crc kubenswrapper[4774]: I1121 16:46:26.786809 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="28129145-49ad-438d-a26f-e8afa14d99c9" containerName="collect-profiles"
Nov 21 16:46:26 crc kubenswrapper[4774]: E1121 16:46:26.786847 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0268166-20f0-4778-976c-58373109d561" containerName="adoption"
Nov 21 16:46:26 crc kubenswrapper[4774]: I1121 16:46:26.786856 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0268166-20f0-4778-976c-58373109d561" containerName="adoption"
Nov 21 16:46:26 crc kubenswrapper[4774]: I1121 16:46:26.787174 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0268166-20f0-4778-976c-58373109d561" containerName="adoption"
Nov 21 16:46:26 crc kubenswrapper[4774]: I1121 16:46:26.787193 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="053368cd-72d7-4402-a577-40330d37399d" containerName="adoption"
Nov 21 16:46:26 crc kubenswrapper[4774]: I1121 16:46:26.787208 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="28129145-49ad-438d-a26f-e8afa14d99c9" containerName="collect-profiles"
Nov 21 16:46:26 crc kubenswrapper[4774]: I1121 16:46:26.792501 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:26 crc kubenswrapper[4774]: I1121 16:46:26.803625 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5x7xf"]
Nov 21 16:46:26 crc kubenswrapper[4774]: I1121 16:46:26.957188 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8k8k\" (UniqueName: \"kubernetes.io/projected/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-kube-api-access-g8k8k\") pod \"certified-operators-5x7xf\" (UID: \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\") " pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:26 crc kubenswrapper[4774]: I1121 16:46:26.957290 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-utilities\") pod \"certified-operators-5x7xf\" (UID: \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\") " pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:26 crc kubenswrapper[4774]: I1121 16:46:26.957345 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-catalog-content\") pod \"certified-operators-5x7xf\" (UID: \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\") " pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:27 crc kubenswrapper[4774]: I1121 16:46:27.059540 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-utilities\") pod \"certified-operators-5x7xf\" (UID: \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\") " pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:27 crc kubenswrapper[4774]: I1121 16:46:27.059638 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-catalog-content\") pod \"certified-operators-5x7xf\" (UID: \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\") " pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:27 crc kubenswrapper[4774]: I1121 16:46:27.059768 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8k8k\" (UniqueName: \"kubernetes.io/projected/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-kube-api-access-g8k8k\") pod \"certified-operators-5x7xf\" (UID: \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\") " pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:27 crc kubenswrapper[4774]: I1121 16:46:27.060600 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-utilities\") pod \"certified-operators-5x7xf\" (UID: \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\") " pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:27 crc kubenswrapper[4774]: I1121 16:46:27.060846 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-catalog-content\") pod \"certified-operators-5x7xf\" (UID: \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\") " pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:27 crc kubenswrapper[4774]: I1121 16:46:27.085978 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8k8k\" (UniqueName: \"kubernetes.io/projected/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-kube-api-access-g8k8k\") pod \"certified-operators-5x7xf\" (UID: \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\") " pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:27 crc kubenswrapper[4774]: I1121 16:46:27.121283 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:27 crc kubenswrapper[4774]: I1121 16:46:27.739800 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5x7xf"]
Nov 21 16:46:28 crc kubenswrapper[4774]: I1121 16:46:28.708228 4774 generic.go:334] "Generic (PLEG): container finished" podID="ba481d9f-6cae-4686-bc9f-21c48c6fffc1" containerID="13e716a5106ff0765372bad7a96898ddc69c3617cf1263ad74f69d11d8b2139c" exitCode=0
Nov 21 16:46:28 crc kubenswrapper[4774]: I1121 16:46:28.708270 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7xf" event={"ID":"ba481d9f-6cae-4686-bc9f-21c48c6fffc1","Type":"ContainerDied","Data":"13e716a5106ff0765372bad7a96898ddc69c3617cf1263ad74f69d11d8b2139c"}
Nov 21 16:46:28 crc kubenswrapper[4774]: I1121 16:46:28.708724 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7xf" event={"ID":"ba481d9f-6cae-4686-bc9f-21c48c6fffc1","Type":"ContainerStarted","Data":"7586a754fd480872b26bcd5cf779a66fe895a8be6961a75022248bbe19329662"}
Nov 21 16:46:28 crc kubenswrapper[4774]: I1121 16:46:28.710943 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 21 16:46:29 crc kubenswrapper[4774]: I1121 16:46:29.721077 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7xf" event={"ID":"ba481d9f-6cae-4686-bc9f-21c48c6fffc1","Type":"ContainerStarted","Data":"b7f287234369dadf3319b5dcf9971882341c79c11b254f63383594456ea0faae"}
Nov 21 16:46:30 crc kubenswrapper[4774]: I1121 16:46:30.738445 4774 generic.go:334] "Generic (PLEG): container finished" podID="ba481d9f-6cae-4686-bc9f-21c48c6fffc1" containerID="b7f287234369dadf3319b5dcf9971882341c79c11b254f63383594456ea0faae" exitCode=0
Nov 21 16:46:30 crc kubenswrapper[4774]: I1121 16:46:30.738537 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7xf" event={"ID":"ba481d9f-6cae-4686-bc9f-21c48c6fffc1","Type":"ContainerDied","Data":"b7f287234369dadf3319b5dcf9971882341c79c11b254f63383594456ea0faae"}
Nov 21 16:46:31 crc kubenswrapper[4774]: I1121 16:46:31.775234 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7xf" event={"ID":"ba481d9f-6cae-4686-bc9f-21c48c6fffc1","Type":"ContainerStarted","Data":"78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a"}
Nov 21 16:46:31 crc kubenswrapper[4774]: I1121 16:46:31.799162 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5x7xf" podStartSLOduration=3.313444788 podStartE2EDuration="5.799130383s" podCreationTimestamp="2025-11-21 16:46:26 +0000 UTC" firstStartedPulling="2025-11-21 16:46:28.710696398 +0000 UTC m=+9779.362895657" lastFinishedPulling="2025-11-21 16:46:31.196381993 +0000 UTC m=+9781.848581252" observedRunningTime="2025-11-21 16:46:31.795559941 +0000 UTC m=+9782.447759200" watchObservedRunningTime="2025-11-21 16:46:31.799130383 +0000 UTC m=+9782.451329642"
Nov 21 16:46:37 crc kubenswrapper[4774]: I1121 16:46:37.121868 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:37 crc kubenswrapper[4774]: I1121 16:46:37.122487 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:37 crc kubenswrapper[4774]: I1121 16:46:37.190044 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:37 crc kubenswrapper[4774]: I1121 16:46:37.931426 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:38 crc kubenswrapper[4774]: I1121 16:46:38.007982 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5x7xf"]
Nov 21 16:46:39 crc kubenswrapper[4774]: I1121 16:46:39.865914 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5x7xf" podUID="ba481d9f-6cae-4686-bc9f-21c48c6fffc1" containerName="registry-server" containerID="cri-o://78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a" gracePeriod=2
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.404769 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.471163 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-catalog-content\") pod \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\" (UID: \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\") "
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.471213 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8k8k\" (UniqueName: \"kubernetes.io/projected/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-kube-api-access-g8k8k\") pod \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\" (UID: \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\") "
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.471360 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-utilities\") pod \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\" (UID: \"ba481d9f-6cae-4686-bc9f-21c48c6fffc1\") "
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.472564 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-utilities" (OuterVolumeSpecName: "utilities") pod "ba481d9f-6cae-4686-bc9f-21c48c6fffc1" (UID: "ba481d9f-6cae-4686-bc9f-21c48c6fffc1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.477993 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-kube-api-access-g8k8k" (OuterVolumeSpecName: "kube-api-access-g8k8k") pod "ba481d9f-6cae-4686-bc9f-21c48c6fffc1" (UID: "ba481d9f-6cae-4686-bc9f-21c48c6fffc1"). InnerVolumeSpecName "kube-api-access-g8k8k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.574135 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8k8k\" (UniqueName: \"kubernetes.io/projected/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-kube-api-access-g8k8k\") on node \"crc\" DevicePath \"\""
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.574180 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-utilities\") on node \"crc\" DevicePath \"\""
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.732092 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ba481d9f-6cae-4686-bc9f-21c48c6fffc1" (UID: "ba481d9f-6cae-4686-bc9f-21c48c6fffc1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.779747 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ba481d9f-6cae-4686-bc9f-21c48c6fffc1-catalog-content\") on node \"crc\" DevicePath \"\""
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.880853 4774 generic.go:334] "Generic (PLEG): container finished" podID="ba481d9f-6cae-4686-bc9f-21c48c6fffc1" containerID="78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a" exitCode=0
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.880910 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7xf" event={"ID":"ba481d9f-6cae-4686-bc9f-21c48c6fffc1","Type":"ContainerDied","Data":"78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a"}
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.880954 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5x7xf" event={"ID":"ba481d9f-6cae-4686-bc9f-21c48c6fffc1","Type":"ContainerDied","Data":"7586a754fd480872b26bcd5cf779a66fe895a8be6961a75022248bbe19329662"}
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.880973 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5x7xf"
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.880979 4774 scope.go:117] "RemoveContainer" containerID="78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a"
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.904686 4774 scope.go:117] "RemoveContainer" containerID="b7f287234369dadf3319b5dcf9971882341c79c11b254f63383594456ea0faae"
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.928892 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5x7xf"]
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.943401 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5x7xf"]
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.953092 4774 scope.go:117] "RemoveContainer" containerID="13e716a5106ff0765372bad7a96898ddc69c3617cf1263ad74f69d11d8b2139c"
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.998186 4774 scope.go:117] "RemoveContainer" containerID="78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a"
Nov 21 16:46:40 crc kubenswrapper[4774]: E1121 16:46:40.999405 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a\": container with ID starting with 78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a not found: ID does not exist" containerID="78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a"
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.999463 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a"} err="failed to get container status \"78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a\": rpc error: code = NotFound desc = could not find container \"78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a\": container with ID starting with 78ee5ec6a9b62758d71994241668283ab77524e7af596ae6bd54bff7c617ec4a not found: ID does not exist"
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.999496 4774 scope.go:117] "RemoveContainer" containerID="b7f287234369dadf3319b5dcf9971882341c79c11b254f63383594456ea0faae"
Nov 21 16:46:40 crc kubenswrapper[4774]: E1121 16:46:40.999879 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7f287234369dadf3319b5dcf9971882341c79c11b254f63383594456ea0faae\": container with ID starting with b7f287234369dadf3319b5dcf9971882341c79c11b254f63383594456ea0faae not found: ID does not exist" containerID="b7f287234369dadf3319b5dcf9971882341c79c11b254f63383594456ea0faae"
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.999918 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7f287234369dadf3319b5dcf9971882341c79c11b254f63383594456ea0faae"} err="failed to get container status \"b7f287234369dadf3319b5dcf9971882341c79c11b254f63383594456ea0faae\": rpc error: code = NotFound desc = could not find container \"b7f287234369dadf3319b5dcf9971882341c79c11b254f63383594456ea0faae\": container with ID starting with b7f287234369dadf3319b5dcf9971882341c79c11b254f63383594456ea0faae not found: ID does not exist"
Nov 21 16:46:40 crc kubenswrapper[4774]: I1121 16:46:40.999946 4774 scope.go:117] "RemoveContainer" containerID="13e716a5106ff0765372bad7a96898ddc69c3617cf1263ad74f69d11d8b2139c"
Nov 21 16:46:41 crc kubenswrapper[4774]: E1121 16:46:41.001395 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13e716a5106ff0765372bad7a96898ddc69c3617cf1263ad74f69d11d8b2139c\": container with ID starting with 13e716a5106ff0765372bad7a96898ddc69c3617cf1263ad74f69d11d8b2139c not found: ID does not exist" containerID="13e716a5106ff0765372bad7a96898ddc69c3617cf1263ad74f69d11d8b2139c"
Nov 21 16:46:41 crc kubenswrapper[4774]: I1121 16:46:41.001429 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13e716a5106ff0765372bad7a96898ddc69c3617cf1263ad74f69d11d8b2139c"} err="failed to get container status \"13e716a5106ff0765372bad7a96898ddc69c3617cf1263ad74f69d11d8b2139c\": rpc error: code = NotFound desc = could not find container \"13e716a5106ff0765372bad7a96898ddc69c3617cf1263ad74f69d11d8b2139c\": container with ID starting with 13e716a5106ff0765372bad7a96898ddc69c3617cf1263ad74f69d11d8b2139c not found: ID does not exist"
Nov 21 16:46:42 crc kubenswrapper[4774]: I1121 16:46:42.135182 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba481d9f-6cae-4686-bc9f-21c48c6fffc1" path="/var/lib/kubelet/pods/ba481d9f-6cae-4686-bc9f-21c48c6fffc1/volumes"
Nov 21 16:46:59 crc kubenswrapper[4774]: I1121 16:46:59.600421 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 16:46:59 crc kubenswrapper[4774]: I1121 16:46:59.601427 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.110811 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-v8pjc/must-gather-9mv46"]
Nov 21 16:47:16 crc kubenswrapper[4774]: E1121 16:47:16.111616 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba481d9f-6cae-4686-bc9f-21c48c6fffc1" containerName="registry-server"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.111629 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba481d9f-6cae-4686-bc9f-21c48c6fffc1" containerName="registry-server"
Nov 21 16:47:16 crc kubenswrapper[4774]: E1121 16:47:16.111656 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba481d9f-6cae-4686-bc9f-21c48c6fffc1" containerName="extract-utilities"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.111662 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba481d9f-6cae-4686-bc9f-21c48c6fffc1" containerName="extract-utilities"
Nov 21 16:47:16 crc kubenswrapper[4774]: E1121 16:47:16.111678 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba481d9f-6cae-4686-bc9f-21c48c6fffc1" containerName="extract-content"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.111684 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba481d9f-6cae-4686-bc9f-21c48c6fffc1" containerName="extract-content"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.111951 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba481d9f-6cae-4686-bc9f-21c48c6fffc1" containerName="registry-server"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.113130 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v8pjc/must-gather-9mv46"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.115796 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-v8pjc/must-gather-9mv46"]
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.117300 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-v8pjc"/"openshift-service-ca.crt"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.117375 4774 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-v8pjc"/"default-dockercfg-5bkl7"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.123551 4774 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-v8pjc"/"kube-root-ca.crt"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.216012 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7qhw\" (UniqueName: \"kubernetes.io/projected/0dafc0a9-dc6a-4a84-9191-9d914a319538-kube-api-access-d7qhw\") pod \"must-gather-9mv46\" (UID: \"0dafc0a9-dc6a-4a84-9191-9d914a319538\") " pod="openshift-must-gather-v8pjc/must-gather-9mv46"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.216406 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0dafc0a9-dc6a-4a84-9191-9d914a319538-must-gather-output\") pod \"must-gather-9mv46\" (UID: \"0dafc0a9-dc6a-4a84-9191-9d914a319538\") " pod="openshift-must-gather-v8pjc/must-gather-9mv46"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.318488 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7qhw\" (UniqueName: \"kubernetes.io/projected/0dafc0a9-dc6a-4a84-9191-9d914a319538-kube-api-access-d7qhw\") pod \"must-gather-9mv46\" (UID: \"0dafc0a9-dc6a-4a84-9191-9d914a319538\") " pod="openshift-must-gather-v8pjc/must-gather-9mv46"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.318688 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0dafc0a9-dc6a-4a84-9191-9d914a319538-must-gather-output\") pod \"must-gather-9mv46\" (UID: \"0dafc0a9-dc6a-4a84-9191-9d914a319538\") " pod="openshift-must-gather-v8pjc/must-gather-9mv46"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.319173 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0dafc0a9-dc6a-4a84-9191-9d914a319538-must-gather-output\") pod \"must-gather-9mv46\" (UID: \"0dafc0a9-dc6a-4a84-9191-9d914a319538\") " pod="openshift-must-gather-v8pjc/must-gather-9mv46"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.350055 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7qhw\" (UniqueName: \"kubernetes.io/projected/0dafc0a9-dc6a-4a84-9191-9d914a319538-kube-api-access-d7qhw\") pod \"must-gather-9mv46\" (UID: \"0dafc0a9-dc6a-4a84-9191-9d914a319538\") " pod="openshift-must-gather-v8pjc/must-gather-9mv46"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.434640 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v8pjc/must-gather-9mv46"
Nov 21 16:47:16 crc kubenswrapper[4774]: I1121 16:47:16.923513 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-v8pjc/must-gather-9mv46"]
Nov 21 16:47:17 crc kubenswrapper[4774]: I1121 16:47:17.334750 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v8pjc/must-gather-9mv46" event={"ID":"0dafc0a9-dc6a-4a84-9191-9d914a319538","Type":"ContainerStarted","Data":"cec46ec6f31710ead443b0398839b326f3ef215ec185534661cc4dff8b34d8f5"}
Nov 21 16:47:26 crc kubenswrapper[4774]: I1121 16:47:26.431911 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v8pjc/must-gather-9mv46" event={"ID":"0dafc0a9-dc6a-4a84-9191-9d914a319538","Type":"ContainerStarted","Data":"d8a22e0a61ee51f2a4c2d11a6e71683c951d09671a07a26d884aba66b3f7ee5c"}
Nov 21 16:47:27 crc kubenswrapper[4774]: I1121 16:47:27.442022 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v8pjc/must-gather-9mv46" event={"ID":"0dafc0a9-dc6a-4a84-9191-9d914a319538","Type":"ContainerStarted","Data":"e8aa9a5064304335851e5babcfe8b41cd64b9c64c8a01b2c1275ba350632722a"}
Nov 21 16:47:27 crc kubenswrapper[4774]: I1121 16:47:27.464714 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-v8pjc/must-gather-9mv46" podStartSLOduration=2.231684377 podStartE2EDuration="11.464696877s" podCreationTimestamp="2025-11-21 16:47:16 +0000 UTC" firstStartedPulling="2025-11-21 16:47:16.933977921 +0000 UTC m=+9827.586177180" lastFinishedPulling="2025-11-21 16:47:26.166990421 +0000 UTC m=+9836.819189680" observedRunningTime="2025-11-21 16:47:27.454956278 +0000 UTC m=+9838.107155537" watchObservedRunningTime="2025-11-21 16:47:27.464696877 +0000 UTC m=+9838.116896136"
Nov 21 16:47:29 crc kubenswrapper[4774]: I1121 16:47:29.600780 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 16:47:29 crc kubenswrapper[4774]: I1121 16:47:29.601245 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 16:47:30 crc kubenswrapper[4774]: I1121 16:47:30.476610 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-v8pjc/crc-debug-wrx4k"]
Nov 21 16:47:30 crc kubenswrapper[4774]: I1121 16:47:30.478604 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v8pjc/crc-debug-wrx4k"
Nov 21 16:47:30 crc kubenswrapper[4774]: I1121 16:47:30.534802 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvl9v\" (UniqueName: \"kubernetes.io/projected/b95b662c-3b35-48d6-8aeb-c3687e39d382-kube-api-access-bvl9v\") pod \"crc-debug-wrx4k\" (UID: \"b95b662c-3b35-48d6-8aeb-c3687e39d382\") " pod="openshift-must-gather-v8pjc/crc-debug-wrx4k"
Nov 21 16:47:30 crc kubenswrapper[4774]: I1121 16:47:30.535254 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b95b662c-3b35-48d6-8aeb-c3687e39d382-host\") pod \"crc-debug-wrx4k\" (UID: \"b95b662c-3b35-48d6-8aeb-c3687e39d382\") " pod="openshift-must-gather-v8pjc/crc-debug-wrx4k"
Nov 21 16:47:30 crc kubenswrapper[4774]: I1121 16:47:30.637162 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvl9v\" (UniqueName: \"kubernetes.io/projected/b95b662c-3b35-48d6-8aeb-c3687e39d382-kube-api-access-bvl9v\") pod \"crc-debug-wrx4k\" (UID: \"b95b662c-3b35-48d6-8aeb-c3687e39d382\") " pod="openshift-must-gather-v8pjc/crc-debug-wrx4k"
Nov 21 16:47:30 crc kubenswrapper[4774]: I1121 16:47:30.637238 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b95b662c-3b35-48d6-8aeb-c3687e39d382-host\") pod \"crc-debug-wrx4k\" (UID: \"b95b662c-3b35-48d6-8aeb-c3687e39d382\") " pod="openshift-must-gather-v8pjc/crc-debug-wrx4k"
Nov 21 16:47:30 crc kubenswrapper[4774]: I1121 16:47:30.637319 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b95b662c-3b35-48d6-8aeb-c3687e39d382-host\") pod \"crc-debug-wrx4k\" (UID: \"b95b662c-3b35-48d6-8aeb-c3687e39d382\") " pod="openshift-must-gather-v8pjc/crc-debug-wrx4k"
Nov 21 16:47:30 crc kubenswrapper[4774]: I1121 16:47:30.659992 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvl9v\" (UniqueName: \"kubernetes.io/projected/b95b662c-3b35-48d6-8aeb-c3687e39d382-kube-api-access-bvl9v\") pod \"crc-debug-wrx4k\" (UID: \"b95b662c-3b35-48d6-8aeb-c3687e39d382\") " pod="openshift-must-gather-v8pjc/crc-debug-wrx4k"
Nov 21 16:47:30 crc kubenswrapper[4774]: I1121 16:47:30.801237 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v8pjc/crc-debug-wrx4k"
Nov 21 16:47:30 crc kubenswrapper[4774]: W1121 16:47:30.838242 4774 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb95b662c_3b35_48d6_8aeb_c3687e39d382.slice/crio-9a68a56abf16ac51356566f87a230066620069813b84e38abb95fe3ce30af5a4 WatchSource:0}: Error finding container 9a68a56abf16ac51356566f87a230066620069813b84e38abb95fe3ce30af5a4: Status 404 returned error can't find the container with id 9a68a56abf16ac51356566f87a230066620069813b84e38abb95fe3ce30af5a4
Nov 21 16:47:31 crc kubenswrapper[4774]: I1121 16:47:31.481567 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v8pjc/crc-debug-wrx4k" event={"ID":"b95b662c-3b35-48d6-8aeb-c3687e39d382","Type":"ContainerStarted","Data":"9a68a56abf16ac51356566f87a230066620069813b84e38abb95fe3ce30af5a4"}
Nov 21 16:47:43 crc kubenswrapper[4774]: I1121 16:47:43.622189 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v8pjc/crc-debug-wrx4k" event={"ID":"b95b662c-3b35-48d6-8aeb-c3687e39d382","Type":"ContainerStarted","Data":"a4762255761aac96aaf40a801bbdbbc98fabef38dcb9164d407c1c0599dc7566"}
Nov 21 16:47:43 crc kubenswrapper[4774]: I1121 16:47:43.639382 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-v8pjc/crc-debug-wrx4k" podStartSLOduration=1.676851632 podStartE2EDuration="13.639367237s" podCreationTimestamp="2025-11-21 16:47:30 +0000 UTC" firstStartedPulling="2025-11-21 16:47:30.840044563 +0000 UTC m=+9841.492243822" lastFinishedPulling="2025-11-21 16:47:42.802560168 +0000 UTC m=+9853.454759427" observedRunningTime="2025-11-21 16:47:43.6363248 +0000 UTC m=+9854.288524059" watchObservedRunningTime="2025-11-21 16:47:43.639367237 +0000 UTC m=+9854.291566496"
Nov 21 16:47:59 crc kubenswrapper[4774]: I1121 16:47:59.600407 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 16:47:59 crc kubenswrapper[4774]: I1121 16:47:59.600943 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 16:47:59 crc kubenswrapper[4774]: I1121 16:47:59.600986 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb"
Nov 21 16:47:59 crc kubenswrapper[4774]: I1121 16:47:59.601803 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"863a82dc07f537f948868ebedea5b04fff97cceb2f10dbb0bd356fdc76e8ae25"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 21 16:47:59 crc kubenswrapper[4774]: I1121 16:47:59.601917 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://863a82dc07f537f948868ebedea5b04fff97cceb2f10dbb0bd356fdc76e8ae25" gracePeriod=600
Nov 21 16:47:59 crc kubenswrapper[4774]: I1121 16:47:59.803940 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="863a82dc07f537f948868ebedea5b04fff97cceb2f10dbb0bd356fdc76e8ae25" exitCode=0
Nov 21 16:47:59 crc kubenswrapper[4774]: I1121 16:47:59.803987 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"863a82dc07f537f948868ebedea5b04fff97cceb2f10dbb0bd356fdc76e8ae25"}
Nov 21 16:47:59 crc kubenswrapper[4774]: I1121 16:47:59.804023 4774 scope.go:117] "RemoveContainer" containerID="d61a396538a2d139754c1886f2479dc94360ff06f46ef4cd2b217e83ccccfa28"
Nov 21 16:48:00 crc kubenswrapper[4774]: I1121 16:48:00.817373 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b"}
Nov 21 16:48:03 crc kubenswrapper[4774]: I1121 16:48:03.871384 4774 generic.go:334] "Generic (PLEG): container finished" podID="b95b662c-3b35-48d6-8aeb-c3687e39d382" containerID="a4762255761aac96aaf40a801bbdbbc98fabef38dcb9164d407c1c0599dc7566" exitCode=0
Nov 21 16:48:03 crc kubenswrapper[4774]: I1121 16:48:03.871462 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v8pjc/crc-debug-wrx4k" event={"ID":"b95b662c-3b35-48d6-8aeb-c3687e39d382","Type":"ContainerDied","Data":"a4762255761aac96aaf40a801bbdbbc98fabef38dcb9164d407c1c0599dc7566"}
Nov 21 16:48:05 crc kubenswrapper[4774]: I1121 16:48:05.135882 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v8pjc/crc-debug-wrx4k"
Nov 21 16:48:05 crc kubenswrapper[4774]: I1121 16:48:05.169605 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-v8pjc/crc-debug-wrx4k"]
Nov 21 16:48:05 crc kubenswrapper[4774]: I1121 16:48:05.181095 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-v8pjc/crc-debug-wrx4k"]
Nov 21 16:48:05 crc kubenswrapper[4774]: I1121 16:48:05.288559 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b95b662c-3b35-48d6-8aeb-c3687e39d382-host\") pod \"b95b662c-3b35-48d6-8aeb-c3687e39d382\" (UID: \"b95b662c-3b35-48d6-8aeb-c3687e39d382\") "
Nov 21 16:48:05 crc kubenswrapper[4774]: I1121 16:48:05.288693 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b95b662c-3b35-48d6-8aeb-c3687e39d382-host" (OuterVolumeSpecName: "host") pod "b95b662c-3b35-48d6-8aeb-c3687e39d382" (UID: "b95b662c-3b35-48d6-8aeb-c3687e39d382"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 21 16:48:05 crc kubenswrapper[4774]: I1121 16:48:05.288739 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvl9v\" (UniqueName: \"kubernetes.io/projected/b95b662c-3b35-48d6-8aeb-c3687e39d382-kube-api-access-bvl9v\") pod \"b95b662c-3b35-48d6-8aeb-c3687e39d382\" (UID: \"b95b662c-3b35-48d6-8aeb-c3687e39d382\") "
Nov 21 16:48:05 crc kubenswrapper[4774]: I1121 16:48:05.289600 4774 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/b95b662c-3b35-48d6-8aeb-c3687e39d382-host\") on node \"crc\" DevicePath \"\""
Nov 21 16:48:05 crc kubenswrapper[4774]: I1121 16:48:05.306754 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b95b662c-3b35-48d6-8aeb-c3687e39d382-kube-api-access-bvl9v" (OuterVolumeSpecName: "kube-api-access-bvl9v") pod "b95b662c-3b35-48d6-8aeb-c3687e39d382" (UID: "b95b662c-3b35-48d6-8aeb-c3687e39d382"). InnerVolumeSpecName "kube-api-access-bvl9v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 16:48:05 crc kubenswrapper[4774]: I1121 16:48:05.391693 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvl9v\" (UniqueName: \"kubernetes.io/projected/b95b662c-3b35-48d6-8aeb-c3687e39d382-kube-api-access-bvl9v\") on node \"crc\" DevicePath \"\""
Nov 21 16:48:05 crc kubenswrapper[4774]: I1121 16:48:05.895132 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a68a56abf16ac51356566f87a230066620069813b84e38abb95fe3ce30af5a4"
Nov 21 16:48:05 crc kubenswrapper[4774]: I1121 16:48:05.895198 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v8pjc/crc-debug-wrx4k"
Nov 21 16:48:06 crc kubenswrapper[4774]: I1121 16:48:06.104087 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b95b662c-3b35-48d6-8aeb-c3687e39d382" path="/var/lib/kubelet/pods/b95b662c-3b35-48d6-8aeb-c3687e39d382/volumes"
Nov 21 16:48:06 crc kubenswrapper[4774]: I1121 16:48:06.382888 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-v8pjc/crc-debug-m4svh"]
Nov 21 16:48:06 crc kubenswrapper[4774]: E1121 16:48:06.383400 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b95b662c-3b35-48d6-8aeb-c3687e39d382" containerName="container-00"
Nov 21 16:48:06 crc kubenswrapper[4774]: I1121 16:48:06.383415 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="b95b662c-3b35-48d6-8aeb-c3687e39d382" containerName="container-00"
Nov 21 16:48:06 crc kubenswrapper[4774]: I1121 16:48:06.383653 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="b95b662c-3b35-48d6-8aeb-c3687e39d382" containerName="container-00"
Nov 21 16:48:06 crc kubenswrapper[4774]: I1121 16:48:06.384427 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v8pjc/crc-debug-m4svh"
Nov 21 16:48:06 crc kubenswrapper[4774]: I1121 16:48:06.514022 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p955j\" (UniqueName: \"kubernetes.io/projected/8627c34a-4955-4f05-9ab5-3dd8327f1c1c-kube-api-access-p955j\") pod \"crc-debug-m4svh\" (UID: \"8627c34a-4955-4f05-9ab5-3dd8327f1c1c\") " pod="openshift-must-gather-v8pjc/crc-debug-m4svh"
Nov 21 16:48:06 crc kubenswrapper[4774]: I1121 16:48:06.514174 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8627c34a-4955-4f05-9ab5-3dd8327f1c1c-host\") pod \"crc-debug-m4svh\" (UID: \"8627c34a-4955-4f05-9ab5-3dd8327f1c1c\") " pod="openshift-must-gather-v8pjc/crc-debug-m4svh"
Nov 21 16:48:06 crc kubenswrapper[4774]: I1121 16:48:06.616748 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p955j\" (UniqueName: \"kubernetes.io/projected/8627c34a-4955-4f05-9ab5-3dd8327f1c1c-kube-api-access-p955j\") pod \"crc-debug-m4svh\" (UID: \"8627c34a-4955-4f05-9ab5-3dd8327f1c1c\") " pod="openshift-must-gather-v8pjc/crc-debug-m4svh"
Nov 21 16:48:06 crc kubenswrapper[4774]: I1121 16:48:06.617154 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8627c34a-4955-4f05-9ab5-3dd8327f1c1c-host\") pod \"crc-debug-m4svh\" (UID: \"8627c34a-4955-4f05-9ab5-3dd8327f1c1c\") " pod="openshift-must-gather-v8pjc/crc-debug-m4svh"
Nov 21 16:48:06 crc kubenswrapper[4774]: I1121 16:48:06.617517 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8627c34a-4955-4f05-9ab5-3dd8327f1c1c-host\") pod \"crc-debug-m4svh\" (UID: \"8627c34a-4955-4f05-9ab5-3dd8327f1c1c\") " pod="openshift-must-gather-v8pjc/crc-debug-m4svh"
Nov 21 16:48:06 crc kubenswrapper[4774]: I1121 16:48:06.918471 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p955j\" (UniqueName: \"kubernetes.io/projected/8627c34a-4955-4f05-9ab5-3dd8327f1c1c-kube-api-access-p955j\") pod \"crc-debug-m4svh\" (UID: \"8627c34a-4955-4f05-9ab5-3dd8327f1c1c\") " pod="openshift-must-gather-v8pjc/crc-debug-m4svh"
Nov 21 16:48:07 crc kubenswrapper[4774]: I1121 16:48:07.004293 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v8pjc/crc-debug-m4svh"
Nov 21 16:48:07 crc kubenswrapper[4774]: I1121 16:48:07.913590 4774 generic.go:334] "Generic (PLEG): container finished" podID="8627c34a-4955-4f05-9ab5-3dd8327f1c1c" containerID="b52893a3463f2da598a9fafdef4e501c2b0c8efdca7219c4f6aca0bf0f581d1c" exitCode=1
Nov 21 16:48:07 crc kubenswrapper[4774]: I1121 16:48:07.913661 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v8pjc/crc-debug-m4svh" event={"ID":"8627c34a-4955-4f05-9ab5-3dd8327f1c1c","Type":"ContainerDied","Data":"b52893a3463f2da598a9fafdef4e501c2b0c8efdca7219c4f6aca0bf0f581d1c"}
Nov 21 16:48:07 crc kubenswrapper[4774]: I1121 16:48:07.914153 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v8pjc/crc-debug-m4svh" event={"ID":"8627c34a-4955-4f05-9ab5-3dd8327f1c1c","Type":"ContainerStarted","Data":"ee111364a3c0db76c4e7b74edd0e72c2632200bfe3f0413aa01666b589afa765"}
Nov 21 16:48:07 crc kubenswrapper[4774]: I1121 16:48:07.951624 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-v8pjc/crc-debug-m4svh"]
Nov 21 16:48:07 crc kubenswrapper[4774]: I1121 16:48:07.959697 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-v8pjc/crc-debug-m4svh"]
Nov 21 16:48:09 crc kubenswrapper[4774]: I1121 16:48:09.037174 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v8pjc/crc-debug-m4svh"
Nov 21 16:48:09 crc kubenswrapper[4774]: I1121 16:48:09.173116 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p955j\" (UniqueName: \"kubernetes.io/projected/8627c34a-4955-4f05-9ab5-3dd8327f1c1c-kube-api-access-p955j\") pod \"8627c34a-4955-4f05-9ab5-3dd8327f1c1c\" (UID: \"8627c34a-4955-4f05-9ab5-3dd8327f1c1c\") "
Nov 21 16:48:09 crc kubenswrapper[4774]: I1121 16:48:09.173203 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8627c34a-4955-4f05-9ab5-3dd8327f1c1c-host\") pod \"8627c34a-4955-4f05-9ab5-3dd8327f1c1c\" (UID: \"8627c34a-4955-4f05-9ab5-3dd8327f1c1c\") "
Nov 21 16:48:09 crc kubenswrapper[4774]: I1121 16:48:09.175160 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8627c34a-4955-4f05-9ab5-3dd8327f1c1c-host" (OuterVolumeSpecName: "host") pod "8627c34a-4955-4f05-9ab5-3dd8327f1c1c" (UID: "8627c34a-4955-4f05-9ab5-3dd8327f1c1c"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 21 16:48:09 crc kubenswrapper[4774]: I1121 16:48:09.180413 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8627c34a-4955-4f05-9ab5-3dd8327f1c1c-kube-api-access-p955j" (OuterVolumeSpecName: "kube-api-access-p955j") pod "8627c34a-4955-4f05-9ab5-3dd8327f1c1c" (UID: "8627c34a-4955-4f05-9ab5-3dd8327f1c1c"). InnerVolumeSpecName "kube-api-access-p955j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 21 16:48:09 crc kubenswrapper[4774]: I1121 16:48:09.275351 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p955j\" (UniqueName: \"kubernetes.io/projected/8627c34a-4955-4f05-9ab5-3dd8327f1c1c-kube-api-access-p955j\") on node \"crc\" DevicePath \"\""
Nov 21 16:48:09 crc kubenswrapper[4774]: I1121 16:48:09.275384 4774 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8627c34a-4955-4f05-9ab5-3dd8327f1c1c-host\") on node \"crc\" DevicePath \"\""
Nov 21 16:48:09 crc kubenswrapper[4774]: I1121 16:48:09.933180 4774 scope.go:117] "RemoveContainer" containerID="b52893a3463f2da598a9fafdef4e501c2b0c8efdca7219c4f6aca0bf0f581d1c"
Nov 21 16:48:09 crc kubenswrapper[4774]: I1121 16:48:09.933250 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v8pjc/crc-debug-m4svh"
Nov 21 16:48:10 crc kubenswrapper[4774]: I1121 16:48:10.104381 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8627c34a-4955-4f05-9ab5-3dd8327f1c1c" path="/var/lib/kubelet/pods/8627c34a-4955-4f05-9ab5-3dd8327f1c1c/volumes"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.612616 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-n57f4"]
Nov 21 16:49:36 crc kubenswrapper[4774]: E1121 16:49:36.613777 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8627c34a-4955-4f05-9ab5-3dd8327f1c1c" containerName="container-00"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.613793 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="8627c34a-4955-4f05-9ab5-3dd8327f1c1c" containerName="container-00"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.614025 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="8627c34a-4955-4f05-9ab5-3dd8327f1c1c" containerName="container-00"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.615620 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.626770 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n57f4"]
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.689873 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mm7n5\" (UniqueName: \"kubernetes.io/projected/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-kube-api-access-mm7n5\") pod \"redhat-marketplace-n57f4\" (UID: \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\") " pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.690011 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-utilities\") pod \"redhat-marketplace-n57f4\" (UID: \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\") " pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.690079 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-catalog-content\") pod \"redhat-marketplace-n57f4\" (UID: \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\") " pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.792085 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mm7n5\" (UniqueName: \"kubernetes.io/projected/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-kube-api-access-mm7n5\") pod \"redhat-marketplace-n57f4\" (UID: \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\") " pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.792470 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-utilities\") pod \"redhat-marketplace-n57f4\" (UID: \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\") " pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.792659 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-catalog-content\") pod \"redhat-marketplace-n57f4\" (UID: \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\") " pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.793055 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-utilities\") pod \"redhat-marketplace-n57f4\" (UID: \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\") " pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.793179 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-catalog-content\") pod \"redhat-marketplace-n57f4\" (UID: \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\") " pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.823544 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mm7n5\" (UniqueName: \"kubernetes.io/projected/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-kube-api-access-mm7n5\") pod \"redhat-marketplace-n57f4\" (UID: \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\") " pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:36 crc kubenswrapper[4774]: I1121 16:49:36.937632 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:37 crc kubenswrapper[4774]: I1121 16:49:37.475069 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-n57f4"]
Nov 21 16:49:37 crc kubenswrapper[4774]: I1121 16:49:37.940801 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n57f4" event={"ID":"3dd0b8a9-451f-4acf-8821-d85a2ba9f821","Type":"ContainerStarted","Data":"f3e0a75a8838434e80a529c819dd73986849164103ef8eca5807e2f82ad5de94"}
Nov 21 16:49:37 crc kubenswrapper[4774]: I1121 16:49:37.941629 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n57f4" event={"ID":"3dd0b8a9-451f-4acf-8821-d85a2ba9f821","Type":"ContainerStarted","Data":"ca5e1f281b1ae14bd0816522e8263751069ead792e4f26a5e63ee43935d02187"}
Nov 21 16:49:38 crc kubenswrapper[4774]: I1121 16:49:38.954455 4774 generic.go:334] "Generic (PLEG): container finished" podID="3dd0b8a9-451f-4acf-8821-d85a2ba9f821" containerID="f3e0a75a8838434e80a529c819dd73986849164103ef8eca5807e2f82ad5de94" exitCode=0
Nov 21 16:49:38 crc kubenswrapper[4774]: I1121 16:49:38.954524 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n57f4" event={"ID":"3dd0b8a9-451f-4acf-8821-d85a2ba9f821","Type":"ContainerDied","Data":"f3e0a75a8838434e80a529c819dd73986849164103ef8eca5807e2f82ad5de94"}
Nov 21 16:49:39 crc kubenswrapper[4774]: I1121 16:49:39.968279 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n57f4" event={"ID":"3dd0b8a9-451f-4acf-8821-d85a2ba9f821","Type":"ContainerStarted","Data":"693066c47253b99a43256e91a24b636ffd70caf6badcbb18c5631b7454d83680"}
Nov 21 16:49:40 crc kubenswrapper[4774]: I1121 16:49:40.979951 4774 generic.go:334] "Generic (PLEG): container finished" podID="3dd0b8a9-451f-4acf-8821-d85a2ba9f821" containerID="693066c47253b99a43256e91a24b636ffd70caf6badcbb18c5631b7454d83680" exitCode=0
Nov 21 16:49:40 crc kubenswrapper[4774]: I1121 16:49:40.980366 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n57f4" event={"ID":"3dd0b8a9-451f-4acf-8821-d85a2ba9f821","Type":"ContainerDied","Data":"693066c47253b99a43256e91a24b636ffd70caf6badcbb18c5631b7454d83680"}
Nov 21 16:49:41 crc kubenswrapper[4774]: I1121 16:49:41.997712 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n57f4" event={"ID":"3dd0b8a9-451f-4acf-8821-d85a2ba9f821","Type":"ContainerStarted","Data":"ec36d5c070e0db2d66822164ec854b8312a8f0b51250f1058872e970c16f8d9f"}
Nov 21 16:49:42 crc kubenswrapper[4774]: I1121 16:49:42.028056 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-n57f4" podStartSLOduration=3.57467252 podStartE2EDuration="6.028038651s" podCreationTimestamp="2025-11-21 16:49:36 +0000 UTC" firstStartedPulling="2025-11-21 16:49:38.958307712 +0000 UTC m=+9969.610506981" lastFinishedPulling="2025-11-21 16:49:41.411673853 +0000 UTC m=+9972.063873112" observedRunningTime="2025-11-21 16:49:42.015996627 +0000 UTC m=+9972.668195896" watchObservedRunningTime="2025-11-21 16:49:42.028038651 +0000 UTC m=+9972.680237910"
Nov 21 16:49:46 crc kubenswrapper[4774]: I1121 16:49:46.937853 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:46 crc kubenswrapper[4774]: I1121 16:49:46.938553 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:47 crc kubenswrapper[4774]: I1121 16:49:47.652042 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:47 crc kubenswrapper[4774]: I1121 16:49:47.709794 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-n57f4"
Nov 21 16:49:47 crc kubenswrapper[4774]: I1121 16:49:47.892749 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n57f4"]
Nov 21 16:49:49 crc kubenswrapper[4774]: I1121 16:49:49.072483 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-n57f4" podUID="3dd0b8a9-451f-4acf-8821-d85a2ba9f821" containerName="registry-server" containerID="cri-o://ec36d5c070e0db2d66822164ec854b8312a8f0b51250f1058872e970c16f8d9f" gracePeriod=2
Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.083425 4774 generic.go:334] "Generic (PLEG): container finished" podID="3dd0b8a9-451f-4acf-8821-d85a2ba9f821" containerID="ec36d5c070e0db2d66822164ec854b8312a8f0b51250f1058872e970c16f8d9f" exitCode=0
Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.083487 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n57f4" event={"ID":"3dd0b8a9-451f-4acf-8821-d85a2ba9f821","Type":"ContainerDied","Data":"ec36d5c070e0db2d66822164ec854b8312a8f0b51250f1058872e970c16f8d9f"}
Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.083736 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-n57f4" event={"ID":"3dd0b8a9-451f-4acf-8821-d85a2ba9f821","Type":"ContainerDied","Data":"ca5e1f281b1ae14bd0816522e8263751069ead792e4f26a5e63ee43935d02187"}
Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.083754 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca5e1f281b1ae14bd0816522e8263751069ead792e4f26a5e63ee43935d02187"
Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.110652 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n57f4" Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.186864 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-catalog-content\") pod \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\" (UID: \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\") " Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.186941 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mm7n5\" (UniqueName: \"kubernetes.io/projected/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-kube-api-access-mm7n5\") pod \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\" (UID: \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\") " Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.187069 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-utilities\") pod \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\" (UID: \"3dd0b8a9-451f-4acf-8821-d85a2ba9f821\") " Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.188414 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-utilities" (OuterVolumeSpecName: "utilities") pod "3dd0b8a9-451f-4acf-8821-d85a2ba9f821" (UID: "3dd0b8a9-451f-4acf-8821-d85a2ba9f821"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.193244 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-kube-api-access-mm7n5" (OuterVolumeSpecName: "kube-api-access-mm7n5") pod "3dd0b8a9-451f-4acf-8821-d85a2ba9f821" (UID: "3dd0b8a9-451f-4acf-8821-d85a2ba9f821"). InnerVolumeSpecName "kube-api-access-mm7n5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.206302 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3dd0b8a9-451f-4acf-8821-d85a2ba9f821" (UID: "3dd0b8a9-451f-4acf-8821-d85a2ba9f821"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.289725 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.289756 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mm7n5\" (UniqueName: \"kubernetes.io/projected/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-kube-api-access-mm7n5\") on node \"crc\" DevicePath \"\"" Nov 21 16:49:50 crc kubenswrapper[4774]: I1121 16:49:50.289766 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3dd0b8a9-451f-4acf-8821-d85a2ba9f821-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:49:51 crc kubenswrapper[4774]: I1121 16:49:51.093447 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-n57f4" Nov 21 16:49:51 crc kubenswrapper[4774]: I1121 16:49:51.127951 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-n57f4"] Nov 21 16:49:51 crc kubenswrapper[4774]: I1121 16:49:51.138025 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-n57f4"] Nov 21 16:49:52 crc kubenswrapper[4774]: I1121 16:49:52.107649 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dd0b8a9-451f-4acf-8821-d85a2ba9f821" path="/var/lib/kubelet/pods/3dd0b8a9-451f-4acf-8821-d85a2ba9f821/volumes" Nov 21 16:50:29 crc kubenswrapper[4774]: I1121 16:50:29.601075 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:50:29 crc kubenswrapper[4774]: I1121 16:50:29.601781 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:50:59 crc kubenswrapper[4774]: I1121 16:50:59.601445 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 21 16:50:59 crc kubenswrapper[4774]: I1121 16:50:59.602840 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 21 16:51:15 crc kubenswrapper[4774]: I1121 16:51:15.845078 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_c15aed44-a7b6-416f-90bd-2a42764b1e68/init-config-reloader/0.log" Nov 21 16:51:16 crc kubenswrapper[4774]: I1121 16:51:16.040699 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_c15aed44-a7b6-416f-90bd-2a42764b1e68/config-reloader/0.log" Nov 21 16:51:16 crc kubenswrapper[4774]: I1121 16:51:16.048865 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_c15aed44-a7b6-416f-90bd-2a42764b1e68/init-config-reloader/0.log" Nov 21 16:51:16 crc kubenswrapper[4774]: I1121 16:51:16.059402 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_c15aed44-a7b6-416f-90bd-2a42764b1e68/alertmanager/0.log" Nov 21 16:51:16 crc kubenswrapper[4774]: I1121 16:51:16.276337 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_75d01969-354d-45d1-8bbd-f3fb0b04e0bd/aodh-api/0.log" Nov 21 16:51:16 crc kubenswrapper[4774]: I1121 16:51:16.296938 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_75d01969-354d-45d1-8bbd-f3fb0b04e0bd/aodh-evaluator/0.log" Nov 21 16:51:16 crc kubenswrapper[4774]: I1121 16:51:16.308985 4774 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_75d01969-354d-45d1-8bbd-f3fb0b04e0bd/aodh-listener/0.log" Nov 21 16:51:16 crc kubenswrapper[4774]: I1121 16:51:16.474595 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_aodh-0_75d01969-354d-45d1-8bbd-f3fb0b04e0bd/aodh-notifier/0.log" Nov 21 16:51:16 crc kubenswrapper[4774]: I1121 16:51:16.553652 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-55dd675f88-stgkz_5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3/barbican-api/0.log" Nov 21 16:51:16 crc kubenswrapper[4774]: I1121 16:51:16.639472 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-55dd675f88-stgkz_5b6d8e18-2f4f-40fd-a29f-4ab576d68ea3/barbican-api-log/0.log" Nov 21 16:51:16 crc kubenswrapper[4774]: I1121 16:51:16.784208 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-65689d7cb-qmp65_08f110f4-3615-41c7-8954-f450c651fe05/barbican-keystone-listener/0.log" Nov 21 16:51:16 crc kubenswrapper[4774]: I1121 16:51:16.872578 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-65689d7cb-qmp65_08f110f4-3615-41c7-8954-f450c651fe05/barbican-keystone-listener-log/0.log" Nov 21 16:51:17 crc kubenswrapper[4774]: I1121 16:51:17.252251 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5bb856d459-sqtcr_ca28a1b6-e307-463d-af12-65024dddb2a7/barbican-worker/0.log" Nov 21 16:51:17 crc kubenswrapper[4774]: I1121 16:51:17.291914 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5bb856d459-sqtcr_ca28a1b6-e307-463d-af12-65024dddb2a7/barbican-worker-log/0.log" Nov 21 16:51:17 crc kubenswrapper[4774]: I1121 16:51:17.460033 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-openstack-openstack-cell1-xh5wz_8643272b-86b9-496c-826e-148e3d20ce71/bootstrap-openstack-openstack-cell1/0.log" Nov 21 16:51:17 crc kubenswrapper[4774]: I1121 16:51:17.532882 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b/ceilometer-central-agent/0.log" Nov 21 16:51:17 crc kubenswrapper[4774]: I1121 16:51:17.686146 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b/proxy-httpd/0.log" Nov 21 16:51:17 crc kubenswrapper[4774]: I1121 16:51:17.708941 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b/ceilometer-notification-agent/0.log" Nov 21 16:51:17 crc kubenswrapper[4774]: I1121 16:51:17.734912 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_ee6332a4-c9aa-46fa-9fd1-e7c1a0a4db1b/sg-core/0.log" Nov 21 16:51:17 crc kubenswrapper[4774]: I1121 16:51:17.918670 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-client-openstack-openstack-cell1-vtdwk_63fc2b59-5dcc-4b9f-a8c0-0877f1490778/ceph-client-openstack-openstack-cell1/0.log" Nov 21 16:51:18 crc kubenswrapper[4774]: I1121 16:51:18.050783 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_582f360a-4d3f-4177-989c-b4c05a1013df/cinder-api/0.log" Nov 21 16:51:18 crc kubenswrapper[4774]: I1121 16:51:18.157996 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_582f360a-4d3f-4177-989c-b4c05a1013df/cinder-api-log/0.log" Nov 21 
16:51:18 crc kubenswrapper[4774]: I1121 16:51:18.428227 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_546269f7-ad07-47a4-9a7b-7d98236673c2/cinder-backup/0.log" Nov 21 16:51:18 crc kubenswrapper[4774]: I1121 16:51:18.455631 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-backup-0_546269f7-ad07-47a4-9a7b-7d98236673c2/probe/0.log" Nov 21 16:51:18 crc kubenswrapper[4774]: I1121 16:51:18.486668 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2c96ad48-aa6a-4f51-a95c-12971f46255f/cinder-scheduler/0.log" Nov 21 16:51:18 crc kubenswrapper[4774]: I1121 16:51:18.719725 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_2c96ad48-aa6a-4f51-a95c-12971f46255f/probe/0.log" Nov 21 16:51:18 crc kubenswrapper[4774]: I1121 16:51:18.758989 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_77096ba7-ee2f-41f7-9457-a85714d0881c/cinder-volume/0.log" Nov 21 16:51:18 crc kubenswrapper[4774]: I1121 16:51:18.842275 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-volume-volume1-0_77096ba7-ee2f-41f7-9457-a85714d0881c/probe/0.log" Nov 21 16:51:19 crc kubenswrapper[4774]: I1121 16:51:19.052322 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-openstack-openstack-cell1-t9t7b_1d149903-950a-438b-89f2-840ef6fca469/configure-network-openstack-openstack-cell1/0.log" Nov 21 16:51:19 crc kubenswrapper[4774]: I1121 16:51:19.111803 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-openstack-openstack-cell1-mklzh_7d471f88-4d25-4cce-84d1-61ac88d8a740/configure-os-openstack-openstack-cell1/0.log" Nov 21 16:51:19 crc kubenswrapper[4774]: I1121 16:51:19.263027 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7fb67cb889-62d6d_c334c682-df4a-4fc9-9672-651075da5c61/init/0.log" Nov 21 16:51:20 crc kubenswrapper[4774]: I1121 16:51:20.020608 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7fb67cb889-62d6d_c334c682-df4a-4fc9-9672-651075da5c61/init/0.log" Nov 21 16:51:20 crc kubenswrapper[4774]: I1121 16:51:20.031033 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-7fb67cb889-62d6d_c334c682-df4a-4fc9-9672-651075da5c61/dnsmasq-dns/0.log" Nov 21 16:51:20 crc kubenswrapper[4774]: I1121 16:51:20.072114 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-openstack-openstack-cell1-jd5xr_467f535e-8c0e-43b0-b241-a85801fcb00a/download-cache-openstack-openstack-cell1/0.log" Nov 21 16:51:20 crc kubenswrapper[4774]: I1121 16:51:20.251133 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3ed244fd-40a8-41fe-84dc-b291ec15dd87/glance-httpd/0.log" Nov 21 16:51:20 crc kubenswrapper[4774]: I1121 16:51:20.295439 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_3ed244fd-40a8-41fe-84dc-b291ec15dd87/glance-log/0.log" Nov 21 16:51:20 crc kubenswrapper[4774]: I1121 16:51:20.353387 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_da63df4b-d3aa-48c0-9d11-834c1c7e825b/glance-httpd/0.log" Nov 21 16:51:20 crc kubenswrapper[4774]: I1121 16:51:20.404619 4774 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-internal-api-0_da63df4b-d3aa-48c0-9d11-834c1c7e825b/glance-log/0.log" Nov 21 16:51:20 crc kubenswrapper[4774]: I1121 16:51:20.617130 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-api-5958684764-r77rb_b6837766-9ca4-42d6-a7a8-15ce3cbb14aa/heat-api/0.log" Nov 21 16:51:20 crc kubenswrapper[4774]: I1121 16:51:20.735474 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-cfnapi-698d644556-pdzp8_b1f38ecd-a119-493e-bdf7-63e4b253586d/heat-cfnapi/0.log" Nov 21 16:51:20 crc kubenswrapper[4774]: I1121 16:51:20.788633 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_heat-engine-94df8c594-xh8vx_19b1e55a-0939-4fba-97a1-b1d3c8d9e14a/heat-engine/0.log" Nov 21 16:51:20 crc kubenswrapper[4774]: I1121 16:51:20.927447 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-846b7d466c-wl6n8_4ac90da4-62d3-4985-83da-d106def413db/horizon/0.log" Nov 21 16:51:21 crc kubenswrapper[4774]: I1121 16:51:21.035870 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-openstack-openstack-cell1-t8cr4_ee250856-b49e-4620-9067-bd30f9324f0b/install-certs-openstack-openstack-cell1/0.log" Nov 21 16:51:21 crc kubenswrapper[4774]: I1121 16:51:21.077401 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-846b7d466c-wl6n8_4ac90da4-62d3-4985-83da-d106def413db/horizon-log/0.log" Nov 21 16:51:21 crc kubenswrapper[4774]: I1121 16:51:21.230832 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-openstack-openstack-cell1-sgn2s_fb254ff4-7f74-4263-96a4-b09712df47f1/install-os-openstack-openstack-cell1/0.log" Nov 21 16:51:21 crc kubenswrapper[4774]: I1121 16:51:21.433884 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-84f6f47465-k6cnt_56867905-10e8-4f90-8716-3a1db96bcb2b/keystone-api/0.log" Nov 21 16:51:21 crc kubenswrapper[4774]: I1121 16:51:21.897899 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29395681-6hjcc_575dd31f-98c7-44ff-9fcc-1c29de6d845f/keystone-cron/0.log" Nov 21 16:51:21 crc kubenswrapper[4774]: I1121 16:51:21.943129 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_9c160743-659f-4bb2-9bc5-2da61867bb84/kube-state-metrics/0.log" Nov 21 16:51:21 crc kubenswrapper[4774]: I1121 16:51:21.962947 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-openstack-openstack-cell1-b55tj_bf4ece17-48c3-4137-9e1a-44d545af4a88/libvirt-openstack-openstack-cell1/0.log" Nov 21 16:51:22 crc kubenswrapper[4774]: I1121 16:51:22.169372 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_c773d1ce-8355-4e9e-b667-259d4090ae94/manila-api-log/0.log" Nov 21 16:51:22 crc kubenswrapper[4774]: I1121 16:51:22.265181 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-api-0_c773d1ce-8355-4e9e-b667-259d4090ae94/manila-api/0.log" Nov 21 16:51:22 crc kubenswrapper[4774]: I1121 16:51:22.354075 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_fc220fc3-ac7d-4c67-8b6c-cb3f15e61099/manila-scheduler/0.log" Nov 21 16:51:22 crc kubenswrapper[4774]: I1121 16:51:22.407645 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-scheduler-0_fc220fc3-ac7d-4c67-8b6c-cb3f15e61099/probe/0.log" Nov 21 16:51:22 crc kubenswrapper[4774]: I1121 16:51:22.522289 
4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_7187f8c3-a88d-4b53-9f36-3d3aaa44a426/probe/0.log" Nov 21 16:51:22 crc kubenswrapper[4774]: I1121 16:51:22.530157 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_manila-share-share1-0_7187f8c3-a88d-4b53-9f36-3d3aaa44a426/manila-share/0.log" Nov 21 16:51:22 crc kubenswrapper[4774]: I1121 16:51:22.864932 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5f6c49b5df-znqbm_6363746f-24af-4fef-918b-395b65ba5242/neutron-httpd/0.log" Nov 21 16:51:22 crc kubenswrapper[4774]: I1121 16:51:22.923044 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5f6c49b5df-znqbm_6363746f-24af-4fef-918b-395b65ba5242/neutron-api/0.log" Nov 21 16:51:22 crc kubenswrapper[4774]: I1121 16:51:22.993853 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-dhcp-openstack-openstack-cell1-8l9vq_56441432-7ab5-47a5-85fc-d584e665625e/neutron-dhcp-openstack-openstack-cell1/0.log" Nov 21 16:51:23 crc kubenswrapper[4774]: I1121 16:51:23.243665 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-openstack-openstack-cell1-s78c9_4fd555c3-cec8-4965-8851-4fbe8106ec02/neutron-metadata-openstack-openstack-cell1/0.log" Nov 21 16:51:23 crc kubenswrapper[4774]: I1121 16:51:23.317222 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-sriov-openstack-openstack-cell1-xs7fw_180963e5-c4a2-40c8-9f16-26ea5b01cfbf/neutron-sriov-openstack-openstack-cell1/0.log" Nov 21 16:51:23 crc kubenswrapper[4774]: I1121 16:51:23.545243 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7/nova-api-api/0.log" Nov 21 16:51:23 crc kubenswrapper[4774]: I1121 16:51:23.664285 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_fd16d6fd-b2c0-4cb2-821c-98f79e2c6ca7/nova-api-log/0.log" Nov 21 16:51:24 crc kubenswrapper[4774]: I1121 16:51:24.017372 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_f141d1f0-7f09-41e7-a3f1-b921ebcf68e0/nova-cell0-conductor-conductor/0.log" Nov 21 16:51:24 crc kubenswrapper[4774]: I1121 16:51:24.042236 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_eb049da2-5aaa-427a-bce8-3bb7843aa828/nova-cell1-conductor-conductor/0.log" Nov 21 16:51:24 crc kubenswrapper[4774]: I1121 16:51:24.317387 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_fadb611f-7092-4459-8b6a-3aeba1e8a7ac/nova-cell1-novncproxy-novncproxy/0.log" Nov 21 16:51:24 crc kubenswrapper[4774]: I1121 16:51:24.647733 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cellbpm7l_d79df006-a98f-45ed-9ef1-ddbf5e3143c5/nova-cell1-openstack-nova-compute-ffu-cell1-openstack-cell1/0.log" Nov 21 16:51:24 crc kubenswrapper[4774]: I1121 16:51:24.742394 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-openstack-openstack-cell1-6x4bb_573bb757-eff3-426f-a71e-7d1c21f6cf67/nova-cell1-openstack-openstack-cell1/0.log" Nov 21 16:51:24 crc kubenswrapper[4774]: I1121 16:51:24.958403 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_ce4fb5ae-af56-4695-b264-e399649045f5/nova-metadata-log/0.log" Nov 21 16:51:25 crc kubenswrapper[4774]: I1121 
16:51:25.010245 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_ce4fb5ae-af56-4695-b264-e399649045f5/nova-metadata-metadata/0.log" Nov 21 16:51:25 crc kubenswrapper[4774]: I1121 16:51:25.282882 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-58f57bccc4-t26gh_7d7b8c2e-d524-42cf-bb67-3fb652fa55a5/init/0.log" Nov 21 16:51:25 crc kubenswrapper[4774]: I1121 16:51:25.322967 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_5083fc02-32d1-460e-899b-1d2e383296c5/nova-scheduler-scheduler/0.log" Nov 21 16:51:25 crc kubenswrapper[4774]: I1121 16:51:25.477907 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-58f57bccc4-t26gh_7d7b8c2e-d524-42cf-bb67-3fb652fa55a5/init/0.log" Nov 21 16:51:25 crc kubenswrapper[4774]: I1121 16:51:25.591670 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-58f57bccc4-t26gh_7d7b8c2e-d524-42cf-bb67-3fb652fa55a5/octavia-api-provider-agent/0.log" Nov 21 16:51:25 crc kubenswrapper[4774]: I1121 16:51:25.735606 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-qhw6s_f680c438-04e4-45a2-9996-f6668b99065d/init/0.log" Nov 21 16:51:25 crc kubenswrapper[4774]: I1121 16:51:25.829799 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-api-58f57bccc4-t26gh_7d7b8c2e-d524-42cf-bb67-3fb652fa55a5/octavia-api/0.log" Nov 21 16:51:26 crc kubenswrapper[4774]: I1121 16:51:26.268130 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-qhw6s_f680c438-04e4-45a2-9996-f6668b99065d/init/0.log" Nov 21 16:51:26 crc kubenswrapper[4774]: I1121 16:51:26.377931 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-healthmanager-qhw6s_f680c438-04e4-45a2-9996-f6668b99065d/octavia-healthmanager/0.log" Nov 21 16:51:26 crc kubenswrapper[4774]: I1121 16:51:26.430539 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-kppw9_e3f74551-4d82-4d29-adc1-9116631e39c0/init/0.log" Nov 21 16:51:26 crc kubenswrapper[4774]: I1121 16:51:26.624608 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-kppw9_e3f74551-4d82-4d29-adc1-9116631e39c0/init/0.log" Nov 21 16:51:26 crc kubenswrapper[4774]: I1121 16:51:26.662462 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-housekeeping-kppw9_e3f74551-4d82-4d29-adc1-9116631e39c0/octavia-housekeeping/0.log" Nov 21 16:51:26 crc kubenswrapper[4774]: I1121 16:51:26.731887 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-5955f5554b-z7sj5_a2c727b3-da08-42a5-8b9c-364f73fbfc0e/init/0.log" Nov 21 16:51:26 crc kubenswrapper[4774]: I1121 16:51:26.952780 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-5955f5554b-z7sj5_a2c727b3-da08-42a5-8b9c-364f73fbfc0e/octavia-amphora-httpd/0.log" Nov 21 16:51:26 crc kubenswrapper[4774]: I1121 16:51:26.967371 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-image-upload-5955f5554b-z7sj5_a2c727b3-da08-42a5-8b9c-364f73fbfc0e/init/0.log" Nov 21 16:51:27 crc kubenswrapper[4774]: I1121 16:51:27.053843 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-c48wg_ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8/init/0.log" Nov 21 16:51:27 crc kubenswrapper[4774]: I1121 
Nov 21 16:51:27 crc kubenswrapper[4774]: I1121 16:51:27.317075 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-dwldc_fb9639ba-9284-43d2-8a1d-0354ae6d4d11/init/0.log"
Nov 21 16:51:27 crc kubenswrapper[4774]: I1121 16:51:27.381285 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-rsyslog-c48wg_ecccdf94-54d3-44a1-b56c-7f5d9ed64ac8/init/0.log"
Nov 21 16:51:27 crc kubenswrapper[4774]: I1121 16:51:27.558437 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-dwldc_fb9639ba-9284-43d2-8a1d-0354ae6d4d11/init/0.log"
Nov 21 16:51:27 crc kubenswrapper[4774]: I1121 16:51:27.734092 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6b63eba9-23bf-47d2-8568-370d10b96150/mysql-bootstrap/0.log"
Nov 21 16:51:27 crc kubenswrapper[4774]: I1121 16:51:27.782517 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_octavia-worker-dwldc_fb9639ba-9284-43d2-8a1d-0354ae6d4d11/octavia-worker/0.log"
Nov 21 16:51:27 crc kubenswrapper[4774]: I1121 16:51:27.964068 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6b63eba9-23bf-47d2-8568-370d10b96150/mysql-bootstrap/0.log"
Nov 21 16:51:28 crc kubenswrapper[4774]: I1121 16:51:28.050350 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_6b63eba9-23bf-47d2-8568-370d10b96150/galera/0.log"
Nov 21 16:51:28 crc kubenswrapper[4774]: I1121 16:51:28.064036 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_14d4cc6c-e087-4c4c-8d38-2c0dce0d210c/mysql-bootstrap/0.log"
Nov 21 16:51:28 crc kubenswrapper[4774]: I1121 16:51:28.225687 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_14d4cc6c-e087-4c4c-8d38-2c0dce0d210c/mysql-bootstrap/0.log"
Nov 21 16:51:28 crc kubenswrapper[4774]: I1121 16:51:28.273336 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_14d4cc6c-e087-4c4c-8d38-2c0dce0d210c/galera/0.log"
Nov 21 16:51:28 crc kubenswrapper[4774]: I1121 16:51:28.381317 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_ad5e9f00-e24f-4ebe-b915-1330652111e6/openstackclient/0.log"
Nov 21 16:51:28 crc kubenswrapper[4774]: I1121 16:51:28.471018 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-htq55_2284d243-017a-4de4-bdd1-5e5d05e56c92/openstack-network-exporter/0.log"
Nov 21 16:51:28 crc kubenswrapper[4774]: I1121 16:51:28.618891 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-mkddd_7e9568d8-f5a4-4bd2-93a2-08df43d611e7/ovn-controller/0.log"
Nov 21 16:51:28 crc kubenswrapper[4774]: I1121 16:51:28.805224 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-svdd5_0b0db751-b2bd-4d68-a90c-c4c6e2b75216/ovsdb-server-init/0.log"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.005045 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-svdd5_0b0db751-b2bd-4d68-a90c-c4c6e2b75216/ovsdb-server-init/0.log"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.043901 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-svdd5_0b0db751-b2bd-4d68-a90c-c4c6e2b75216/ovs-vswitchd/0.log"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.132793 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-svdd5_0b0db751-b2bd-4d68-a90c-c4c6e2b75216/ovsdb-server/0.log"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.234180 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_09e968b0-0cb1-43c1-b3c6-12873fa2d80e/openstack-network-exporter/0.log"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.295991 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_09e968b0-0cb1-43c1-b3c6-12873fa2d80e/ovn-northd/0.log"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.514636 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_11da5c53-952b-4bb6-bb73-a47bd209d574/openstack-network-exporter/0.log"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.600906 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.600987 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.601053 4774 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.602701 4774 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b"} pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.603087 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" containerID="cri-o://6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" gracePeriod=600
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.703318 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_11da5c53-952b-4bb6-bb73-a47bd209d574/ovsdbserver-nb/0.log"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.711896 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-openstack-openstack-cell1-nz7qj_687eea4a-3909-493e-a5a6-74f84cd247b1/ovn-openstack-openstack-cell1/0.log"
Nov 21 16:51:29 crc kubenswrapper[4774]: E1121 16:51:29.731906 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.819970 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_42447951-828b-43e4-af24-86669d2c25b9/openstack-network-exporter/0.log"
Nov 21 16:51:29 crc kubenswrapper[4774]: I1121 16:51:29.971177 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-1_42447951-828b-43e4-af24-86669d2c25b9/ovsdbserver-nb/0.log"
Nov 21 16:51:30 crc kubenswrapper[4774]: I1121 16:51:30.054228 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_c77605fe-1da0-4848-9319-1235551dd807/openstack-network-exporter/0.log"
Nov 21 16:51:30 crc kubenswrapper[4774]: I1121 16:51:30.094006 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-2_c77605fe-1da0-4848-9319-1235551dd807/ovsdbserver-nb/0.log"
Nov 21 16:51:30 crc kubenswrapper[4774]: I1121 16:51:30.201645 4774 generic.go:334] "Generic (PLEG): container finished" podID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" exitCode=0
Nov 21 16:51:30 crc kubenswrapper[4774]: I1121 16:51:30.201686 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerDied","Data":"6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b"}
Nov 21 16:51:30 crc kubenswrapper[4774]: I1121 16:51:30.201721 4774 scope.go:117] "RemoveContainer" containerID="863a82dc07f537f948868ebedea5b04fff97cceb2f10dbb0bd356fdc76e8ae25"
Nov 21 16:51:30 crc kubenswrapper[4774]: I1121 16:51:30.202487 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b"
Nov 21 16:51:30 crc kubenswrapper[4774]: E1121 16:51:30.202782 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:51:30 crc kubenswrapper[4774]: I1121 16:51:30.305983 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_94b57d87-612f-4d44-84f2-9cd3ffcf5ff7/openstack-network-exporter/0.log"
Nov 21 16:51:30 crc kubenswrapper[4774]: I1121 16:51:30.377013 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_94b57d87-612f-4d44-84f2-9cd3ffcf5ff7/ovsdbserver-sb/0.log"
Nov 21 16:51:30 crc kubenswrapper[4774]: I1121 16:51:30.536492 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_24d15b5b-e658-4b9b-8d23-4ecaf3308bc5/openstack-network-exporter/0.log"
Nov 21 16:51:30 crc kubenswrapper[4774]: I1121 16:51:30.601011 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-1_24d15b5b-e658-4b9b-8d23-4ecaf3308bc5/ovsdbserver-sb/0.log"
Nov 21 16:51:31 crc kubenswrapper[4774]: I1121 16:51:31.380673 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_95b1e4bc-598a-40b1-ba11-38997e4c5f41/ovsdbserver-sb/0.log"
Nov 21 16:51:31 crc kubenswrapper[4774]: I1121 16:51:31.428328 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-2_95b1e4bc-598a-40b1-ba11-38997e4c5f41/openstack-network-exporter/0.log"
Nov 21 16:51:31 crc kubenswrapper[4774]: I1121 16:51:31.472042 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-64f867456d-c2t4b_c1f65da0-5cc3-4448-96b0-61e27f2506cb/placement-api/0.log"
Nov 21 16:51:31 crc kubenswrapper[4774]: I1121 16:51:31.671876 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_pre-adoption-validation-openstack-pre-adoption-openstack-cm8w8j_3eaa2587-5f2d-4df9-a322-3261da7ca988/pre-adoption-validation-openstack-pre-adoption-openstack-cell1/0.log"
Nov 21 16:51:31 crc kubenswrapper[4774]: I1121 16:51:31.724061 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-64f867456d-c2t4b_c1f65da0-5cc3-4448-96b0-61e27f2506cb/placement-log/0.log"
Nov 21 16:51:31 crc kubenswrapper[4774]: I1121 16:51:31.899371 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_7b305f49-43b7-43d8-972d-d07ace53858f/init-config-reloader/0.log"
Nov 21 16:51:32 crc kubenswrapper[4774]: I1121 16:51:32.102145 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_7b305f49-43b7-43d8-972d-d07ace53858f/config-reloader/0.log"
Nov 21 16:51:32 crc kubenswrapper[4774]: I1121 16:51:32.104393 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_7b305f49-43b7-43d8-972d-d07ace53858f/init-config-reloader/0.log"
Nov 21 16:51:32 crc kubenswrapper[4774]: I1121 16:51:32.122706 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_7b305f49-43b7-43d8-972d-d07ace53858f/thanos-sidecar/0.log"
Nov 21 16:51:32 crc kubenswrapper[4774]: I1121 16:51:32.126324 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_7b305f49-43b7-43d8-972d-d07ace53858f/prometheus/0.log"
Nov 21 16:51:32 crc kubenswrapper[4774]: I1121 16:51:32.342965 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2ee0feed-380f-455b-be85-6eae06c085e7/setup-container/0.log"
Nov 21 16:51:32 crc kubenswrapper[4774]: I1121 16:51:32.533871 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2ee0feed-380f-455b-be85-6eae06c085e7/setup-container/0.log"
Nov 21 16:51:32 crc kubenswrapper[4774]: I1121 16:51:32.578066 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2ee0feed-380f-455b-be85-6eae06c085e7/rabbitmq/0.log"
Nov 21 16:51:32 crc kubenswrapper[4774]: I1121 16:51:32.649212 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_552d8f65-7177-4f9b-a454-a31a1528b17f/setup-container/0.log"
Nov 21 16:51:33 crc kubenswrapper[4774]: I1121 16:51:33.436784 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_552d8f65-7177-4f9b-a454-a31a1528b17f/setup-container/0.log"
Nov 21 16:51:33 crc kubenswrapper[4774]: I1121 16:51:33.477702 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_552d8f65-7177-4f9b-a454-a31a1528b17f/rabbitmq/0.log"
Nov 21 16:51:33 crc kubenswrapper[4774]: I1121 16:51:33.506392 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-openstack-openstack-cell1-tnwgq_98752e83-384a-4828-b900-1b8b62522ece/reboot-os-openstack-openstack-cell1/0.log"
Nov 21 16:51:33 crc kubenswrapper[4774]: I1121 16:51:33.700237 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-openstack-openstack-cell1-d4tnk_c084e8b3-667e-4b63-b370-a667a4b0cda6/run-os-openstack-openstack-cell1/0.log"
Nov 21 16:51:33 crc kubenswrapper[4774]: I1121 16:51:33.784751 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-openstack-pf4n6_c2331cdc-adfd-4e78-b7eb-c91e7518b9c3/ssh-known-hosts-openstack/0.log"
Nov 21 16:51:34 crc kubenswrapper[4774]: I1121 16:51:34.024919 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-openstack-openstack-cell1-5whzr_01366752-fe13-4f55-be84-723e156bfdb1/telemetry-openstack-openstack-cell1/0.log"
Nov 21 16:51:34 crc kubenswrapper[4774]: I1121 16:51:34.167670 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tripleo-cleanup-tripleo-cleanup-openstack-cell1-dn9sx_90a66815-e9c7-4b6e-869e-661af63e3e00/tripleo-cleanup-tripleo-cleanup-openstack-cell1/0.log"
Nov 21 16:51:34 crc kubenswrapper[4774]: I1121 16:51:34.246470 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-openstack-openstack-cell1-z64fz_2fbbb706-71fc-47de-b788-ecb529f77d77/validate-network-openstack-openstack-cell1/0.log"
Nov 21 16:51:34 crc kubenswrapper[4774]: I1121 16:51:34.407341 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_730f0937-f89d-410c-879c-5f561cffc548/memcached/0.log"
Nov 21 16:51:45 crc kubenswrapper[4774]: I1121 16:51:45.093521 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b"
Nov 21 16:51:45 crc kubenswrapper[4774]: E1121 16:51:45.094155 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd"
Nov 21 16:51:55 crc kubenswrapper[4774]: I1121 16:51:55.445609 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f_841c2b76-6113-4cc9-a146-67a723c67ad4/util/0.log"
Nov 21 16:51:55 crc kubenswrapper[4774]: I1121 16:51:55.637172 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f_841c2b76-6113-4cc9-a146-67a723c67ad4/util/0.log"
Nov 21 16:51:55 crc kubenswrapper[4774]: I1121 16:51:55.638369 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f_841c2b76-6113-4cc9-a146-67a723c67ad4/pull/0.log"
Nov 21 16:51:55 crc kubenswrapper[4774]: I1121 16:51:55.644602 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f_841c2b76-6113-4cc9-a146-67a723c67ad4/pull/0.log"
Nov 21 16:51:55 crc kubenswrapper[4774]: I1121 16:51:55.807800 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f_841c2b76-6113-4cc9-a146-67a723c67ad4/extract/0.log"
path="/var/log/pods/openstack-operators_2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f_841c2b76-6113-4cc9-a146-67a723c67ad4/extract/0.log" Nov 21 16:51:55 crc kubenswrapper[4774]: I1121 16:51:55.835558 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f_841c2b76-6113-4cc9-a146-67a723c67ad4/pull/0.log" Nov 21 16:51:55 crc kubenswrapper[4774]: I1121 16:51:55.841626 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2f600d40645c3b5d5826f2c9954a3807468e61602beafe19f4b735f42dffh9f_841c2b76-6113-4cc9-a146-67a723c67ad4/util/0.log" Nov 21 16:51:55 crc kubenswrapper[4774]: I1121 16:51:55.991381 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7768f8c84f-jf5xt_f50b16b0-3430-4378-a32c-8d09f402108e/kube-rbac-proxy/0.log" Nov 21 16:51:56 crc kubenswrapper[4774]: I1121 16:51:56.102111 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6d8fd67bf7-vfpjr_c28fdb9b-2f84-41f0-ae41-977dca177484/kube-rbac-proxy/0.log" Nov 21 16:51:56 crc kubenswrapper[4774]: I1121 16:51:56.113700 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7768f8c84f-jf5xt_f50b16b0-3430-4378-a32c-8d09f402108e/manager/0.log" Nov 21 16:51:56 crc kubenswrapper[4774]: I1121 16:51:56.298943 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6d8fd67bf7-vfpjr_c28fdb9b-2f84-41f0-ae41-977dca177484/manager/0.log" Nov 21 16:51:56 crc kubenswrapper[4774]: I1121 16:51:56.325669 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-56dfb6b67f-qs9lx_cf13e842-38d2-409c-87f8-3163868965d8/manager/0.log" Nov 21 16:51:56 crc kubenswrapper[4774]: I1121 16:51:56.358626 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-56dfb6b67f-qs9lx_cf13e842-38d2-409c-87f8-3163868965d8/kube-rbac-proxy/0.log" Nov 21 16:51:56 crc kubenswrapper[4774]: I1121 16:51:56.519193 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8667fbf6f6-mx2n9_c2a3d34a-eca8-4106-8a2b-47254b1af44b/kube-rbac-proxy/0.log" Nov 21 16:51:56 crc kubenswrapper[4774]: I1121 16:51:56.635166 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-8667fbf6f6-mx2n9_c2a3d34a-eca8-4106-8a2b-47254b1af44b/manager/0.log" Nov 21 16:51:56 crc kubenswrapper[4774]: I1121 16:51:56.715396 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-bf4c6585d-ctkn6_496286dc-00cb-42ae-914e-4d8769847726/kube-rbac-proxy/0.log" Nov 21 16:51:56 crc kubenswrapper[4774]: I1121 16:51:56.800600 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-bf4c6585d-ctkn6_496286dc-00cb-42ae-914e-4d8769847726/manager/0.log" Nov 21 16:51:56 crc kubenswrapper[4774]: I1121 16:51:56.841357 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5d86b44686-c8wfq_d22140fe-1eb2-4f64-84e5-1d3ad3902a94/kube-rbac-proxy/0.log" Nov 21 16:51:56 crc kubenswrapper[4774]: I1121 16:51:56.915489 
4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-5d86b44686-c8wfq_d22140fe-1eb2-4f64-84e5-1d3ad3902a94/manager/0.log" Nov 21 16:51:57 crc kubenswrapper[4774]: I1121 16:51:57.035880 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-769d9c7585-7hqmf_7e09cfc1-a56d-49fb-ac6f-f9007b4a4128/kube-rbac-proxy/0.log" Nov 21 16:51:57 crc kubenswrapper[4774]: I1121 16:51:57.276352 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5c75d7c94b-9j9tx_340633c8-9873-455b-9ad3-617764d7f1ad/kube-rbac-proxy/0.log" Nov 21 16:51:57 crc kubenswrapper[4774]: I1121 16:51:57.300219 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-5c75d7c94b-9j9tx_340633c8-9873-455b-9ad3-617764d7f1ad/manager/0.log" Nov 21 16:51:57 crc kubenswrapper[4774]: I1121 16:51:57.385730 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-769d9c7585-7hqmf_7e09cfc1-a56d-49fb-ac6f-f9007b4a4128/manager/0.log" Nov 21 16:51:57 crc kubenswrapper[4774]: I1121 16:51:57.508612 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7879fb76fd-7qkkw_881c2298-a491-4657-9982-55fe889c9b4f/kube-rbac-proxy/0.log" Nov 21 16:51:57 crc kubenswrapper[4774]: I1121 16:51:57.590368 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7879fb76fd-7qkkw_881c2298-a491-4657-9982-55fe889c9b4f/manager/0.log" Nov 21 16:51:57 crc kubenswrapper[4774]: I1121 16:51:57.610682 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7bb88cb858-7xd6m_47c58999-f804-4a5c-bcc7-3aae79eab6da/kube-rbac-proxy/0.log" Nov 21 16:51:57 crc kubenswrapper[4774]: I1121 16:51:57.740509 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7bb88cb858-7xd6m_47c58999-f804-4a5c-bcc7-3aae79eab6da/manager/0.log" Nov 21 16:51:57 crc kubenswrapper[4774]: I1121 16:51:57.800294 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6f8c5b86cb-8qwvm_2f2e330f-c352-4cd9-afd0-bf306e99fb39/kube-rbac-proxy/0.log" Nov 21 16:51:57 crc kubenswrapper[4774]: I1121 16:51:57.863098 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6f8c5b86cb-8qwvm_2f2e330f-c352-4cd9-afd0-bf306e99fb39/manager/0.log" Nov 21 16:51:57 crc kubenswrapper[4774]: I1121 16:51:57.985696 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-66b7d6f598-x8k99_c63b62b2-ef4d-4138-9d5c-e7b087ac25ce/kube-rbac-proxy/0.log" Nov 21 16:51:58 crc kubenswrapper[4774]: I1121 16:51:58.089377 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-66b7d6f598-x8k99_c63b62b2-ef4d-4138-9d5c-e7b087ac25ce/manager/0.log" Nov 21 16:51:58 crc kubenswrapper[4774]: I1121 16:51:58.240282 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-86d796d84d-87ktc_4e5dbca6-27a9-4c2b-81e1-4a062af18fa2/kube-rbac-proxy/0.log" Nov 21 16:51:58 crc kubenswrapper[4774]: 
I1121 16:51:58.359668 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6fdc856c5d-d5wzw_5644bfb5-eee6-4ecd-976c-00ae40333bf3/kube-rbac-proxy/0.log" Nov 21 16:51:58 crc kubenswrapper[4774]: I1121 16:51:58.451237 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-86d796d84d-87ktc_4e5dbca6-27a9-4c2b-81e1-4a062af18fa2/manager/0.log" Nov 21 16:51:58 crc kubenswrapper[4774]: I1121 16:51:58.539541 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-6fdc856c5d-d5wzw_5644bfb5-eee6-4ecd-976c-00ae40333bf3/manager/0.log" Nov 21 16:51:58 crc kubenswrapper[4774]: I1121 16:51:58.606927 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk_fc7b50ee-89ac-491f-8dfd-23a32ccf9e82/kube-rbac-proxy/0.log" Nov 21 16:51:58 crc kubenswrapper[4774]: I1121 16:51:58.694572 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6c655cdc6c5nqjk_fc7b50ee-89ac-491f-8dfd-23a32ccf9e82/manager/0.log" Nov 21 16:51:58 crc kubenswrapper[4774]: I1121 16:51:58.805494 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7755d5f8cc-9cc79_efa3c204-339b-4adc-ba7f-614c918c7873/kube-rbac-proxy/0.log" Nov 21 16:51:59 crc kubenswrapper[4774]: I1121 16:51:59.033318 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-77c7f689f5-f5rhg_ba0840f2-15c3-48ed-bd53-7057786f734a/kube-rbac-proxy/0.log" Nov 21 16:51:59 crc kubenswrapper[4774]: I1121 16:51:59.094506 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:51:59 crc kubenswrapper[4774]: E1121 16:51:59.095055 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:51:59 crc kubenswrapper[4774]: I1121 16:51:59.152287 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-77c7f689f5-f5rhg_ba0840f2-15c3-48ed-bd53-7057786f734a/operator/0.log" Nov 21 16:51:59 crc kubenswrapper[4774]: I1121 16:51:59.324166 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5bdf4f7f7f-mz68p_e304c11a-e256-4c84-a317-b8b7eadd767a/kube-rbac-proxy/0.log" Nov 21 16:51:59 crc kubenswrapper[4774]: I1121 16:51:59.398256 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-fqdjt_8f966582-0f84-41c5-ad5a-b96988e4368e/registry-server/0.log" Nov 21 16:51:59 crc kubenswrapper[4774]: I1121 16:51:59.470987 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-5bdf4f7f7f-mz68p_e304c11a-e256-4c84-a317-b8b7eadd767a/manager/0.log" Nov 21 16:51:59 crc kubenswrapper[4774]: I1121 16:51:59.566326 4774 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-6dc664666c-z95rc_62889a75-0d04-4f7f-b03e-225eaee9ce86/kube-rbac-proxy/0.log" Nov 21 16:51:59 crc kubenswrapper[4774]: I1121 16:51:59.724950 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-6dc664666c-z95rc_62889a75-0d04-4f7f-b03e-225eaee9ce86/manager/0.log" Nov 21 16:51:59 crc kubenswrapper[4774]: I1121 16:51:59.803525 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-qsc6s_70619ac6-0265-4fe9-aad9-f9e4797ac7f9/operator/0.log" Nov 21 16:51:59 crc kubenswrapper[4774]: I1121 16:51:59.976268 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-799cb6ffd6-gxhk7_53fd7850-6e67-4a0f-88c5-ecb3870ce1aa/manager/0.log" Nov 21 16:51:59 crc kubenswrapper[4774]: I1121 16:51:59.992394 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-799cb6ffd6-gxhk7_53fd7850-6e67-4a0f-88c5-ecb3870ce1aa/kube-rbac-proxy/0.log" Nov 21 16:52:00 crc kubenswrapper[4774]: I1121 16:52:00.156107 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-7798859c74-ksx5x_d60cbec8-ea75-476b-b4a2-9ff3272a11c1/kube-rbac-proxy/0.log" Nov 21 16:52:00 crc kubenswrapper[4774]: I1121 16:52:00.363784 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-8464cf66df-8w5rz_1c1c222a-bb01-48b9-8115-1a4a35278047/kube-rbac-proxy/0.log" Nov 21 16:52:00 crc kubenswrapper[4774]: I1121 16:52:00.407016 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-7798859c74-ksx5x_d60cbec8-ea75-476b-b4a2-9ff3272a11c1/manager/0.log" Nov 21 16:52:00 crc kubenswrapper[4774]: I1121 16:52:00.438465 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-8464cf66df-8w5rz_1c1c222a-bb01-48b9-8115-1a4a35278047/manager/0.log" Nov 21 16:52:00 crc kubenswrapper[4774]: I1121 16:52:00.561486 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7cd4fb6f79-mtc8r_3d0b08e2-b552-4a9a-a87d-bfae30f9045d/kube-rbac-proxy/0.log" Nov 21 16:52:00 crc kubenswrapper[4774]: I1121 16:52:00.585460 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-7cd4fb6f79-mtc8r_3d0b08e2-b552-4a9a-a87d-bfae30f9045d/manager/0.log" Nov 21 16:52:01 crc kubenswrapper[4774]: I1121 16:52:01.142928 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7755d5f8cc-9cc79_efa3c204-339b-4adc-ba7f-614c918c7873/manager/0.log" Nov 21 16:52:14 crc kubenswrapper[4774]: I1121 16:52:14.093516 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:52:14 crc kubenswrapper[4774]: E1121 16:52:14.094258 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:52:17 crc kubenswrapper[4774]: I1121 16:52:17.284666 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-c8jn9_c3717d22-e2b1-427b-8585-9ba3daa3b61c/control-plane-machine-set-operator/0.log" Nov 21 16:52:17 crc kubenswrapper[4774]: I1121 16:52:17.451314 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-8h89j_6ded279c-1738-42b5-8828-e4883c3756bf/kube-rbac-proxy/0.log" Nov 21 16:52:17 crc kubenswrapper[4774]: I1121 16:52:17.461871 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-8h89j_6ded279c-1738-42b5-8828-e4883c3756bf/machine-api-operator/0.log" Nov 21 16:52:29 crc kubenswrapper[4774]: I1121 16:52:29.094015 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:52:29 crc kubenswrapper[4774]: E1121 16:52:29.095988 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:52:30 crc kubenswrapper[4774]: I1121 16:52:30.033740 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-b57xc_e1ea63d6-75ce-4b59-83c9-9f63a0d6f740/cert-manager-controller/0.log" Nov 21 16:52:30 crc kubenswrapper[4774]: I1121 16:52:30.239596 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-dn8mb_da46cbb6-cd5f-4d50-ad76-23fc875189cb/cert-manager-webhook/0.log" Nov 21 16:52:30 crc kubenswrapper[4774]: I1121 16:52:30.247163 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-sg8g8_86914560-07d8-4113-90f0-e5549a06d52f/cert-manager-cainjector/0.log" Nov 21 16:52:41 crc kubenswrapper[4774]: I1121 16:52:41.092967 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:52:41 crc kubenswrapper[4774]: E1121 16:52:41.093741 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:52:43 crc kubenswrapper[4774]: I1121 16:52:43.105257 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-5874bd7bc5-vk5n6_a49e02ad-f5a9-45a8-b1e5-b688d18383b5/nmstate-console-plugin/0.log" Nov 21 16:52:43 crc kubenswrapper[4774]: I1121 16:52:43.297733 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-5dcf9c57c5-85d2j_6a94325f-9054-4c04-a0fa-64490ec11e50/kube-rbac-proxy/0.log" Nov 21 16:52:43 crc kubenswrapper[4774]: I1121 16:52:43.301573 4774 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-wwdp7_d1402901-5738-45cc-8122-8a6f0b711e7c/nmstate-handler/0.log" Nov 21 16:52:43 crc kubenswrapper[4774]: I1121 16:52:43.383231 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-5dcf9c57c5-85d2j_6a94325f-9054-4c04-a0fa-64490ec11e50/nmstate-metrics/0.log" Nov 21 16:52:43 crc kubenswrapper[4774]: I1121 16:52:43.533130 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-557fdffb88-889z8_7f7c6f72-ef8d-4c52-8ed9-2d37c82733be/nmstate-operator/0.log" Nov 21 16:52:43 crc kubenswrapper[4774]: I1121 16:52:43.610951 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6b89b748d8-cpslg_3eb78967-8171-4bec-8eac-616d427e4a8a/nmstate-webhook/0.log" Nov 21 16:52:54 crc kubenswrapper[4774]: I1121 16:52:54.093127 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:52:54 crc kubenswrapper[4774]: E1121 16:52:54.094963 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:52:59 crc kubenswrapper[4774]: I1121 16:52:59.117677 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-575gd_b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9/kube-rbac-proxy/0.log" Nov 21 16:52:59 crc kubenswrapper[4774]: I1121 16:52:59.414703 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/cp-frr-files/0.log" Nov 21 16:52:59 crc kubenswrapper[4774]: I1121 16:52:59.502377 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-575gd_b41e9d86-4f0e-4cee-b6f3-44b53a4a12a9/controller/0.log" Nov 21 16:52:59 crc kubenswrapper[4774]: I1121 16:52:59.576934 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/cp-frr-files/0.log" Nov 21 16:52:59 crc kubenswrapper[4774]: I1121 16:52:59.608903 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/cp-metrics/0.log" Nov 21 16:52:59 crc kubenswrapper[4774]: I1121 16:52:59.633222 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/cp-reloader/0.log" Nov 21 16:52:59 crc kubenswrapper[4774]: I1121 16:52:59.691538 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/cp-reloader/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.102990 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/cp-reloader/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.103119 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/cp-metrics/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.165168 4774 log.go:25] "Finished parsing 
log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/cp-metrics/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.169975 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/cp-frr-files/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.295423 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/cp-frr-files/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.295452 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/cp-reloader/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.337307 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/cp-metrics/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.380507 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/controller/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.485557 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/frr-metrics/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.562349 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/kube-rbac-proxy/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.589562 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/kube-rbac-proxy-frr/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.792995 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/reloader/0.log" Nov 21 16:53:00 crc kubenswrapper[4774]: I1121 16:53:00.796458 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-6998585d5-7mgxh_88f1b63d-3ba8-494f-9331-1e50303360dd/frr-k8s-webhook-server/0.log" Nov 21 16:53:01 crc kubenswrapper[4774]: I1121 16:53:01.049749 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6b9b569fbd-zv6fk_70f8c673-949c-45d9-881b-eaf99a5fc797/manager/0.log" Nov 21 16:53:01 crc kubenswrapper[4774]: I1121 16:53:01.298452 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-944c54d44-trkz5_5ede42a0-d5c9-4693-b58c-e11a09887d36/webhook-server/0.log" Nov 21 16:53:01 crc kubenswrapper[4774]: I1121 16:53:01.333812 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-gvjv6_03b7ac83-7cfb-4adb-9460-e9dd4c2c911a/kube-rbac-proxy/0.log" Nov 21 16:53:02 crc kubenswrapper[4774]: I1121 16:53:02.390002 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-gvjv6_03b7ac83-7cfb-4adb-9460-e9dd4c2c911a/speaker/0.log" Nov 21 16:53:04 crc kubenswrapper[4774]: I1121 16:53:04.441785 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-vxqzf_a2e96c23-b50a-4c74-9bac-e46d87db1d0a/frr/0.log" Nov 21 16:53:07 crc kubenswrapper[4774]: I1121 16:53:07.093354 4774 scope.go:117] "RemoveContainer" 
containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:53:07 crc kubenswrapper[4774]: E1121 16:53:07.094240 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:53:15 crc kubenswrapper[4774]: I1121 16:53:15.528500 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94_a87574a6-adb5-41d6-935f-5f1e7ce90e7f/util/0.log" Nov 21 16:53:15 crc kubenswrapper[4774]: I1121 16:53:15.771328 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94_a87574a6-adb5-41d6-935f-5f1e7ce90e7f/pull/0.log" Nov 21 16:53:15 crc kubenswrapper[4774]: I1121 16:53:15.839489 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94_a87574a6-adb5-41d6-935f-5f1e7ce90e7f/util/0.log" Nov 21 16:53:15 crc kubenswrapper[4774]: I1121 16:53:15.842052 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94_a87574a6-adb5-41d6-935f-5f1e7ce90e7f/pull/0.log" Nov 21 16:53:16 crc kubenswrapper[4774]: I1121 16:53:16.000278 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94_a87574a6-adb5-41d6-935f-5f1e7ce90e7f/util/0.log" Nov 21 16:53:16 crc kubenswrapper[4774]: I1121 16:53:16.057844 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94_a87574a6-adb5-41d6-935f-5f1e7ce90e7f/extract/0.log" Nov 21 16:53:16 crc kubenswrapper[4774]: I1121 16:53:16.078222 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931apqb94_a87574a6-adb5-41d6-935f-5f1e7ce90e7f/pull/0.log" Nov 21 16:53:16 crc kubenswrapper[4774]: I1121 16:53:16.908438 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj_aeb69fbb-0e79-498d-87c8-6b2bcef5607f/util/0.log" Nov 21 16:53:17 crc kubenswrapper[4774]: I1121 16:53:17.070833 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj_aeb69fbb-0e79-498d-87c8-6b2bcef5607f/pull/0.log" Nov 21 16:53:17 crc kubenswrapper[4774]: I1121 16:53:17.091018 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj_aeb69fbb-0e79-498d-87c8-6b2bcef5607f/util/0.log" Nov 21 16:53:17 crc kubenswrapper[4774]: I1121 16:53:17.112445 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj_aeb69fbb-0e79-498d-87c8-6b2bcef5607f/pull/0.log" Nov 21 16:53:17 crc kubenswrapper[4774]: I1121 16:53:17.285295 4774 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj_aeb69fbb-0e79-498d-87c8-6b2bcef5607f/pull/0.log" Nov 21 16:53:17 crc kubenswrapper[4774]: I1121 16:53:17.336346 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj_aeb69fbb-0e79-498d-87c8-6b2bcef5607f/util/0.log" Nov 21 16:53:17 crc kubenswrapper[4774]: I1121 16:53:17.336894 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5c796334424b8139919e908729ac8fe5c1f6e7b6bc33540f00b4f8772erh2qj_aeb69fbb-0e79-498d-87c8-6b2bcef5607f/extract/0.log" Nov 21 16:53:17 crc kubenswrapper[4774]: I1121 16:53:17.510624 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw_dfe3578c-e945-477d-9a6d-ade2a4563182/util/0.log" Nov 21 16:53:17 crc kubenswrapper[4774]: I1121 16:53:17.675357 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw_dfe3578c-e945-477d-9a6d-ade2a4563182/pull/0.log" Nov 21 16:53:17 crc kubenswrapper[4774]: I1121 16:53:17.720266 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw_dfe3578c-e945-477d-9a6d-ade2a4563182/util/0.log" Nov 21 16:53:17 crc kubenswrapper[4774]: I1121 16:53:17.739451 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw_dfe3578c-e945-477d-9a6d-ade2a4563182/pull/0.log" Nov 21 16:53:18 crc kubenswrapper[4774]: I1121 16:53:18.111863 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw_dfe3578c-e945-477d-9a6d-ade2a4563182/util/0.log" Nov 21 16:53:18 crc kubenswrapper[4774]: I1121 16:53:18.115476 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw_dfe3578c-e945-477d-9a6d-ade2a4563182/extract/0.log" Nov 21 16:53:18 crc kubenswrapper[4774]: I1121 16:53:18.133434 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210vvvbw_dfe3578c-e945-477d-9a6d-ade2a4563182/pull/0.log" Nov 21 16:53:18 crc kubenswrapper[4774]: I1121 16:53:18.315797 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-97n98_8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb/extract-utilities/0.log" Nov 21 16:53:18 crc kubenswrapper[4774]: I1121 16:53:18.506043 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-97n98_8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb/extract-content/0.log" Nov 21 16:53:18 crc kubenswrapper[4774]: I1121 16:53:18.520249 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-97n98_8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb/extract-utilities/0.log" Nov 21 16:53:18 crc kubenswrapper[4774]: I1121 16:53:18.591748 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-97n98_8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb/extract-content/0.log" Nov 21 16:53:18 crc kubenswrapper[4774]: I1121 16:53:18.743913 4774 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-97n98_8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb/extract-utilities/0.log" Nov 21 16:53:18 crc kubenswrapper[4774]: I1121 16:53:18.768195 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-97n98_8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb/extract-content/0.log" Nov 21 16:53:18 crc kubenswrapper[4774]: I1121 16:53:18.961426 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-stwtl_a661380a-1987-4c11-a202-448d6fca796e/extract-utilities/0.log" Nov 21 16:53:19 crc kubenswrapper[4774]: I1121 16:53:19.093402 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:53:19 crc kubenswrapper[4774]: E1121 16:53:19.093622 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:53:19 crc kubenswrapper[4774]: I1121 16:53:19.117067 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-stwtl_a661380a-1987-4c11-a202-448d6fca796e/extract-utilities/0.log" Nov 21 16:53:19 crc kubenswrapper[4774]: I1121 16:53:19.179747 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-stwtl_a661380a-1987-4c11-a202-448d6fca796e/extract-content/0.log" Nov 21 16:53:19 crc kubenswrapper[4774]: I1121 16:53:19.220406 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-stwtl_a661380a-1987-4c11-a202-448d6fca796e/extract-content/0.log" Nov 21 16:53:19 crc kubenswrapper[4774]: I1121 16:53:19.348480 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-stwtl_a661380a-1987-4c11-a202-448d6fca796e/extract-utilities/0.log" Nov 21 16:53:19 crc kubenswrapper[4774]: I1121 16:53:19.394897 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-stwtl_a661380a-1987-4c11-a202-448d6fca796e/extract-content/0.log" Nov 21 16:53:19 crc kubenswrapper[4774]: I1121 16:53:19.574484 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr_e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd/util/0.log" Nov 21 16:53:19 crc kubenswrapper[4774]: I1121 16:53:19.838520 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr_e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd/util/0.log" Nov 21 16:53:19 crc kubenswrapper[4774]: I1121 16:53:19.844347 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr_e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd/pull/0.log" Nov 21 16:53:19 crc kubenswrapper[4774]: I1121 16:53:19.949168 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr_e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd/pull/0.log" Nov 21 16:53:20 crc 
kubenswrapper[4774]: I1121 16:53:20.175212 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr_e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd/extract/0.log" Nov 21 16:53:20 crc kubenswrapper[4774]: I1121 16:53:20.221785 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr_e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd/util/0.log" Nov 21 16:53:20 crc kubenswrapper[4774]: I1121 16:53:20.223506 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6vr9kr_e45fb0a0-a378-42d0-8ff1-c9f03dfb02fd/pull/0.log" Nov 21 16:53:20 crc kubenswrapper[4774]: I1121 16:53:20.469415 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-5nkpf_81b3a21d-90d3-446a-b6ab-f3be7356fd56/marketplace-operator/0.log" Nov 21 16:53:20 crc kubenswrapper[4774]: I1121 16:53:20.635609 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rmcf7_4f9fbbc3-d25f-46e1-9db6-49a5505385cb/extract-utilities/0.log" Nov 21 16:53:20 crc kubenswrapper[4774]: I1121 16:53:20.907271 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rmcf7_4f9fbbc3-d25f-46e1-9db6-49a5505385cb/extract-utilities/0.log" Nov 21 16:53:20 crc kubenswrapper[4774]: I1121 16:53:20.911216 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rmcf7_4f9fbbc3-d25f-46e1-9db6-49a5505385cb/extract-content/0.log" Nov 21 16:53:20 crc kubenswrapper[4774]: I1121 16:53:20.926496 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rmcf7_4f9fbbc3-d25f-46e1-9db6-49a5505385cb/extract-content/0.log" Nov 21 16:53:21 crc kubenswrapper[4774]: I1121 16:53:21.105781 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-97n98_8ff4d566-1b6b-47b0-8112-6fdd7a77ecbb/registry-server/0.log" Nov 21 16:53:21 crc kubenswrapper[4774]: I1121 16:53:21.171078 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rmcf7_4f9fbbc3-d25f-46e1-9db6-49a5505385cb/extract-content/0.log" Nov 21 16:53:21 crc kubenswrapper[4774]: I1121 16:53:21.171112 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-rmcf7_4f9fbbc3-d25f-46e1-9db6-49a5505385cb/extract-utilities/0.log" Nov 21 16:53:21 crc kubenswrapper[4774]: I1121 16:53:21.383935 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-crjv9_739d794c-64f6-4e61-8761-2e270429f355/extract-utilities/0.log" Nov 21 16:53:21 crc kubenswrapper[4774]: I1121 16:53:21.408202 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-stwtl_a661380a-1987-4c11-a202-448d6fca796e/registry-server/0.log" Nov 21 16:53:21 crc kubenswrapper[4774]: I1121 16:53:21.572042 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-crjv9_739d794c-64f6-4e61-8761-2e270429f355/extract-utilities/0.log" Nov 21 16:53:21 crc kubenswrapper[4774]: I1121 16:53:21.602567 4774 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-rmcf7_4f9fbbc3-d25f-46e1-9db6-49a5505385cb/registry-server/0.log" Nov 21 16:53:21 crc kubenswrapper[4774]: I1121 16:53:21.629982 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-crjv9_739d794c-64f6-4e61-8761-2e270429f355/extract-content/0.log" Nov 21 16:53:21 crc kubenswrapper[4774]: I1121 16:53:21.647337 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-crjv9_739d794c-64f6-4e61-8761-2e270429f355/extract-content/0.log" Nov 21 16:53:21 crc kubenswrapper[4774]: I1121 16:53:21.788913 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-crjv9_739d794c-64f6-4e61-8761-2e270429f355/extract-utilities/0.log" Nov 21 16:53:21 crc kubenswrapper[4774]: I1121 16:53:21.836245 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-crjv9_739d794c-64f6-4e61-8761-2e270429f355/extract-content/0.log" Nov 21 16:53:22 crc kubenswrapper[4774]: I1121 16:53:22.997253 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-crjv9_739d794c-64f6-4e61-8761-2e270429f355/registry-server/0.log" Nov 21 16:53:30 crc kubenswrapper[4774]: I1121 16:53:30.094252 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:53:30 crc kubenswrapper[4774]: E1121 16:53:30.095183 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:53:34 crc kubenswrapper[4774]: I1121 16:53:34.848538 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-std25_5ea09086-0f96-4641-ac00-84ad39559acc/prometheus-operator/0.log" Nov 21 16:53:35 crc kubenswrapper[4774]: I1121 16:53:35.038202 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-879c99469-n85w9_5dbe1bcc-00ab-42f2-aa80-7e8fad97f9f8/prometheus-operator-admission-webhook/0.log" Nov 21 16:53:35 crc kubenswrapper[4774]: I1121 16:53:35.046577 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-879c99469-plznc_8a13a74a-7bd0-4e11-9dec-402a38c7e984/prometheus-operator-admission-webhook/0.log" Nov 21 16:53:35 crc kubenswrapper[4774]: I1121 16:53:35.224397 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-cv785_cda01f23-488b-459e-8ec4-f4825f188d16/operator/0.log" Nov 21 16:53:35 crc kubenswrapper[4774]: I1121 16:53:35.255645 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-54wn4_cf419f67-423f-4dcb-86e6-ad76fd3a9489/perses-operator/0.log" Nov 21 16:53:45 crc kubenswrapper[4774]: I1121 16:53:45.093576 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:53:45 crc kubenswrapper[4774]: E1121 16:53:45.094638 4774 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:53:48 crc kubenswrapper[4774]: E1121 16:53:48.180941 4774 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.181:37868->38.102.83.181:39405: read tcp 38.102.83.181:37868->38.102.83.181:39405: read: connection reset by peer Nov 21 16:53:54 crc kubenswrapper[4774]: I1121 16:53:54.876634 4774 scope.go:117] "RemoveContainer" containerID="a4762255761aac96aaf40a801bbdbbc98fabef38dcb9164d407c1c0599dc7566" Nov 21 16:53:59 crc kubenswrapper[4774]: I1121 16:53:59.093328 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:53:59 crc kubenswrapper[4774]: E1121 16:53:59.094232 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:54:10 crc kubenswrapper[4774]: I1121 16:54:10.102322 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:54:10 crc kubenswrapper[4774]: E1121 16:54:10.103067 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:54:21 crc kubenswrapper[4774]: I1121 16:54:21.093800 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:54:21 crc kubenswrapper[4774]: E1121 16:54:21.094788 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:54:33 crc kubenswrapper[4774]: I1121 16:54:33.093939 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:54:33 crc kubenswrapper[4774]: E1121 16:54:33.094862 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:54:44 crc 
kubenswrapper[4774]: I1121 16:54:44.094636 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:54:44 crc kubenswrapper[4774]: E1121 16:54:44.095446 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:54:56 crc kubenswrapper[4774]: I1121 16:54:56.093301 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:54:56 crc kubenswrapper[4774]: E1121 16:54:56.094209 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:55:10 crc kubenswrapper[4774]: I1121 16:55:10.102807 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:55:10 crc kubenswrapper[4774]: E1121 16:55:10.105070 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.068714 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tkg7w"] Nov 21 16:55:19 crc kubenswrapper[4774]: E1121 16:55:19.069807 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd0b8a9-451f-4acf-8821-d85a2ba9f821" containerName="registry-server" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.069843 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd0b8a9-451f-4acf-8821-d85a2ba9f821" containerName="registry-server" Nov 21 16:55:19 crc kubenswrapper[4774]: E1121 16:55:19.069858 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd0b8a9-451f-4acf-8821-d85a2ba9f821" containerName="extract-utilities" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.069865 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd0b8a9-451f-4acf-8821-d85a2ba9f821" containerName="extract-utilities" Nov 21 16:55:19 crc kubenswrapper[4774]: E1121 16:55:19.069894 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd0b8a9-451f-4acf-8821-d85a2ba9f821" containerName="extract-content" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.069901 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd0b8a9-451f-4acf-8821-d85a2ba9f821" containerName="extract-content" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.070172 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dd0b8a9-451f-4acf-8821-d85a2ba9f821" 
containerName="registry-server" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.072009 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.073338 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-catalog-content\") pod \"redhat-operators-tkg7w\" (UID: \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\") " pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.073427 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-utilities\") pod \"redhat-operators-tkg7w\" (UID: \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\") " pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.073455 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntcp6\" (UniqueName: \"kubernetes.io/projected/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-kube-api-access-ntcp6\") pod \"redhat-operators-tkg7w\" (UID: \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\") " pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.091841 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tkg7w"] Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.175179 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-catalog-content\") pod \"redhat-operators-tkg7w\" (UID: \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\") " pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.175837 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-catalog-content\") pod \"redhat-operators-tkg7w\" (UID: \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\") " pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.176514 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-utilities\") pod \"redhat-operators-tkg7w\" (UID: \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\") " pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.176916 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-utilities\") pod \"redhat-operators-tkg7w\" (UID: \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\") " pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.177002 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntcp6\" (UniqueName: \"kubernetes.io/projected/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-kube-api-access-ntcp6\") pod \"redhat-operators-tkg7w\" (UID: \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\") " pod="openshift-marketplace/redhat-operators-tkg7w" Nov 
21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.198789 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntcp6\" (UniqueName: \"kubernetes.io/projected/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-kube-api-access-ntcp6\") pod \"redhat-operators-tkg7w\" (UID: \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\") " pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.406289 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:19 crc kubenswrapper[4774]: I1121 16:55:19.989388 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tkg7w"] Nov 21 16:55:20 crc kubenswrapper[4774]: I1121 16:55:20.800674 4774 generic.go:334] "Generic (PLEG): container finished" podID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" containerID="0a68ee798b5fc04b864b74930ab93cf8787bacdfa732dcc16ba6c1084d06a5f9" exitCode=0 Nov 21 16:55:20 crc kubenswrapper[4774]: I1121 16:55:20.800888 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tkg7w" event={"ID":"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d","Type":"ContainerDied","Data":"0a68ee798b5fc04b864b74930ab93cf8787bacdfa732dcc16ba6c1084d06a5f9"} Nov 21 16:55:20 crc kubenswrapper[4774]: I1121 16:55:20.801229 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tkg7w" event={"ID":"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d","Type":"ContainerStarted","Data":"4df6c5a0ecda382c8c954ba8fa8300ccb911c1702b386585c45b9236aad9228f"} Nov 21 16:55:20 crc kubenswrapper[4774]: I1121 16:55:20.805603 4774 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 21 16:55:22 crc kubenswrapper[4774]: I1121 16:55:22.848172 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tkg7w" event={"ID":"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d","Type":"ContainerStarted","Data":"65c098da3c84d634b9c90f36e4b75381916d25fb034134fbdd3f8b0ca8507576"} Nov 21 16:55:24 crc kubenswrapper[4774]: I1121 16:55:24.093354 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:55:24 crc kubenswrapper[4774]: E1121 16:55:24.095179 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:55:27 crc kubenswrapper[4774]: I1121 16:55:27.918146 4774 generic.go:334] "Generic (PLEG): container finished" podID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" containerID="65c098da3c84d634b9c90f36e4b75381916d25fb034134fbdd3f8b0ca8507576" exitCode=0 Nov 21 16:55:27 crc kubenswrapper[4774]: I1121 16:55:27.918284 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tkg7w" event={"ID":"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d","Type":"ContainerDied","Data":"65c098da3c84d634b9c90f36e4b75381916d25fb034134fbdd3f8b0ca8507576"} Nov 21 16:55:28 crc kubenswrapper[4774]: I1121 16:55:28.933588 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tkg7w" 
event={"ID":"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d","Type":"ContainerStarted","Data":"74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e"} Nov 21 16:55:28 crc kubenswrapper[4774]: I1121 16:55:28.950750 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tkg7w" podStartSLOduration=2.336057745 podStartE2EDuration="9.950728944s" podCreationTimestamp="2025-11-21 16:55:19 +0000 UTC" firstStartedPulling="2025-11-21 16:55:20.805289774 +0000 UTC m=+10311.457489033" lastFinishedPulling="2025-11-21 16:55:28.419960973 +0000 UTC m=+10319.072160232" observedRunningTime="2025-11-21 16:55:28.950427735 +0000 UTC m=+10319.602626994" watchObservedRunningTime="2025-11-21 16:55:28.950728944 +0000 UTC m=+10319.602928203" Nov 21 16:55:29 crc kubenswrapper[4774]: I1121 16:55:29.406751 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:29 crc kubenswrapper[4774]: I1121 16:55:29.406884 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:30 crc kubenswrapper[4774]: I1121 16:55:30.472310 4774 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tkg7w" podUID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" containerName="registry-server" probeResult="failure" output=< Nov 21 16:55:30 crc kubenswrapper[4774]: timeout: failed to connect service ":50051" within 1s Nov 21 16:55:30 crc kubenswrapper[4774]: > Nov 21 16:55:36 crc kubenswrapper[4774]: I1121 16:55:36.093069 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:55:36 crc kubenswrapper[4774]: E1121 16:55:36.093894 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:55:39 crc kubenswrapper[4774]: I1121 16:55:39.458605 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:39 crc kubenswrapper[4774]: I1121 16:55:39.509291 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:39 crc kubenswrapper[4774]: I1121 16:55:39.703786 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tkg7w"] Nov 21 16:55:41 crc kubenswrapper[4774]: I1121 16:55:41.062160 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tkg7w" podUID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" containerName="registry-server" containerID="cri-o://74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e" gracePeriod=2 Nov 21 16:55:41 crc kubenswrapper[4774]: I1121 16:55:41.578139 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:41 crc kubenswrapper[4774]: I1121 16:55:41.683470 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-catalog-content\") pod \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\" (UID: \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\") " Nov 21 16:55:41 crc kubenswrapper[4774]: I1121 16:55:41.683662 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-utilities\") pod \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\" (UID: \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\") " Nov 21 16:55:41 crc kubenswrapper[4774]: I1121 16:55:41.684461 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-utilities" (OuterVolumeSpecName: "utilities") pod "83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" (UID: "83ec3432-5b0d-42f5-b4bf-7fde7f2a271d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:55:41 crc kubenswrapper[4774]: I1121 16:55:41.684654 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntcp6\" (UniqueName: \"kubernetes.io/projected/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-kube-api-access-ntcp6\") pod \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\" (UID: \"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d\") " Nov 21 16:55:41 crc kubenswrapper[4774]: I1121 16:55:41.685437 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:55:41 crc kubenswrapper[4774]: I1121 16:55:41.692006 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-kube-api-access-ntcp6" (OuterVolumeSpecName: "kube-api-access-ntcp6") pod "83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" (UID: "83ec3432-5b0d-42f5-b4bf-7fde7f2a271d"). InnerVolumeSpecName "kube-api-access-ntcp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:55:41 crc kubenswrapper[4774]: I1121 16:55:41.780956 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" (UID: "83ec3432-5b0d-42f5-b4bf-7fde7f2a271d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:55:41 crc kubenswrapper[4774]: I1121 16:55:41.787581 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntcp6\" (UniqueName: \"kubernetes.io/projected/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-kube-api-access-ntcp6\") on node \"crc\" DevicePath \"\"" Nov 21 16:55:41 crc kubenswrapper[4774]: I1121 16:55:41.787614 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.076142 4774 generic.go:334] "Generic (PLEG): container finished" podID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" containerID="74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e" exitCode=0 Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.076192 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tkg7w" event={"ID":"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d","Type":"ContainerDied","Data":"74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e"} Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.076224 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tkg7w" event={"ID":"83ec3432-5b0d-42f5-b4bf-7fde7f2a271d","Type":"ContainerDied","Data":"4df6c5a0ecda382c8c954ba8fa8300ccb911c1702b386585c45b9236aad9228f"} Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.076245 4774 scope.go:117] "RemoveContainer" containerID="74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e" Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.076278 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tkg7w" Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.097173 4774 scope.go:117] "RemoveContainer" containerID="65c098da3c84d634b9c90f36e4b75381916d25fb034134fbdd3f8b0ca8507576" Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.123558 4774 scope.go:117] "RemoveContainer" containerID="0a68ee798b5fc04b864b74930ab93cf8787bacdfa732dcc16ba6c1084d06a5f9" Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.137328 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tkg7w"] Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.148828 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tkg7w"] Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.173459 4774 scope.go:117] "RemoveContainer" containerID="74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e" Nov 21 16:55:42 crc kubenswrapper[4774]: E1121 16:55:42.174015 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e\": container with ID starting with 74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e not found: ID does not exist" containerID="74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e" Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.174062 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e"} err="failed to get container status \"74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e\": rpc error: code = NotFound desc = could not find container \"74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e\": container with ID starting with 74bdcdb175d531382da0f2d43c9a538df8f7ca218eaab1bd1eec36d41b740c2e not found: ID does not exist" Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.174088 4774 scope.go:117] "RemoveContainer" containerID="65c098da3c84d634b9c90f36e4b75381916d25fb034134fbdd3f8b0ca8507576" Nov 21 16:55:42 crc kubenswrapper[4774]: E1121 16:55:42.174544 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65c098da3c84d634b9c90f36e4b75381916d25fb034134fbdd3f8b0ca8507576\": container with ID starting with 65c098da3c84d634b9c90f36e4b75381916d25fb034134fbdd3f8b0ca8507576 not found: ID does not exist" containerID="65c098da3c84d634b9c90f36e4b75381916d25fb034134fbdd3f8b0ca8507576" Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.174592 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65c098da3c84d634b9c90f36e4b75381916d25fb034134fbdd3f8b0ca8507576"} err="failed to get container status \"65c098da3c84d634b9c90f36e4b75381916d25fb034134fbdd3f8b0ca8507576\": rpc error: code = NotFound desc = could not find container \"65c098da3c84d634b9c90f36e4b75381916d25fb034134fbdd3f8b0ca8507576\": container with ID starting with 65c098da3c84d634b9c90f36e4b75381916d25fb034134fbdd3f8b0ca8507576 not found: ID does not exist" Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.174624 4774 scope.go:117] "RemoveContainer" containerID="0a68ee798b5fc04b864b74930ab93cf8787bacdfa732dcc16ba6c1084d06a5f9" Nov 21 16:55:42 crc kubenswrapper[4774]: E1121 16:55:42.175005 4774 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"0a68ee798b5fc04b864b74930ab93cf8787bacdfa732dcc16ba6c1084d06a5f9\": container with ID starting with 0a68ee798b5fc04b864b74930ab93cf8787bacdfa732dcc16ba6c1084d06a5f9 not found: ID does not exist" containerID="0a68ee798b5fc04b864b74930ab93cf8787bacdfa732dcc16ba6c1084d06a5f9" Nov 21 16:55:42 crc kubenswrapper[4774]: I1121 16:55:42.175040 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a68ee798b5fc04b864b74930ab93cf8787bacdfa732dcc16ba6c1084d06a5f9"} err="failed to get container status \"0a68ee798b5fc04b864b74930ab93cf8787bacdfa732dcc16ba6c1084d06a5f9\": rpc error: code = NotFound desc = could not find container \"0a68ee798b5fc04b864b74930ab93cf8787bacdfa732dcc16ba6c1084d06a5f9\": container with ID starting with 0a68ee798b5fc04b864b74930ab93cf8787bacdfa732dcc16ba6c1084d06a5f9 not found: ID does not exist" Nov 21 16:55:44 crc kubenswrapper[4774]: I1121 16:55:44.108579 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" path="/var/lib/kubelet/pods/83ec3432-5b0d-42f5-b4bf-7fde7f2a271d/volumes" Nov 21 16:55:47 crc kubenswrapper[4774]: I1121 16:55:47.128055 4774 generic.go:334] "Generic (PLEG): container finished" podID="0dafc0a9-dc6a-4a84-9191-9d914a319538" containerID="d8a22e0a61ee51f2a4c2d11a6e71683c951d09671a07a26d884aba66b3f7ee5c" exitCode=0 Nov 21 16:55:47 crc kubenswrapper[4774]: I1121 16:55:47.128107 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-v8pjc/must-gather-9mv46" event={"ID":"0dafc0a9-dc6a-4a84-9191-9d914a319538","Type":"ContainerDied","Data":"d8a22e0a61ee51f2a4c2d11a6e71683c951d09671a07a26d884aba66b3f7ee5c"} Nov 21 16:55:47 crc kubenswrapper[4774]: I1121 16:55:47.129194 4774 scope.go:117] "RemoveContainer" containerID="d8a22e0a61ee51f2a4c2d11a6e71683c951d09671a07a26d884aba66b3f7ee5c" Nov 21 16:55:47 crc kubenswrapper[4774]: I1121 16:55:47.694406 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-v8pjc_must-gather-9mv46_0dafc0a9-dc6a-4a84-9191-9d914a319538/gather/0.log" Nov 21 16:55:48 crc kubenswrapper[4774]: I1121 16:55:48.100644 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:55:48 crc kubenswrapper[4774]: E1121 16:55:48.101005 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:55:54 crc kubenswrapper[4774]: I1121 16:55:54.991252 4774 scope.go:117] "RemoveContainer" containerID="ec36d5c070e0db2d66822164ec854b8312a8f0b51250f1058872e970c16f8d9f" Nov 21 16:55:55 crc kubenswrapper[4774]: I1121 16:55:55.021535 4774 scope.go:117] "RemoveContainer" containerID="693066c47253b99a43256e91a24b636ffd70caf6badcbb18c5631b7454d83680" Nov 21 16:55:55 crc kubenswrapper[4774]: I1121 16:55:55.053570 4774 scope.go:117] "RemoveContainer" containerID="f3e0a75a8838434e80a529c819dd73986849164103ef8eca5807e2f82ad5de94" Nov 21 16:55:55 crc kubenswrapper[4774]: I1121 16:55:55.810674 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-v8pjc/must-gather-9mv46"] Nov 21 
16:55:55 crc kubenswrapper[4774]: I1121 16:55:55.811213 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-v8pjc/must-gather-9mv46" podUID="0dafc0a9-dc6a-4a84-9191-9d914a319538" containerName="copy" containerID="cri-o://e8aa9a5064304335851e5babcfe8b41cd64b9c64c8a01b2c1275ba350632722a" gracePeriod=2 Nov 21 16:55:55 crc kubenswrapper[4774]: I1121 16:55:55.827400 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-v8pjc/must-gather-9mv46"] Nov 21 16:55:56 crc kubenswrapper[4774]: I1121 16:55:56.244737 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-v8pjc_must-gather-9mv46_0dafc0a9-dc6a-4a84-9191-9d914a319538/copy/0.log" Nov 21 16:55:56 crc kubenswrapper[4774]: I1121 16:55:56.245197 4774 generic.go:334] "Generic (PLEG): container finished" podID="0dafc0a9-dc6a-4a84-9191-9d914a319538" containerID="e8aa9a5064304335851e5babcfe8b41cd64b9c64c8a01b2c1275ba350632722a" exitCode=143 Nov 21 16:55:56 crc kubenswrapper[4774]: I1121 16:55:56.245245 4774 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cec46ec6f31710ead443b0398839b326f3ef215ec185534661cc4dff8b34d8f5" Nov 21 16:55:56 crc kubenswrapper[4774]: I1121 16:55:56.263864 4774 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-v8pjc_must-gather-9mv46_0dafc0a9-dc6a-4a84-9191-9d914a319538/copy/0.log" Nov 21 16:55:56 crc kubenswrapper[4774]: I1121 16:55:56.264787 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v8pjc/must-gather-9mv46" Nov 21 16:55:56 crc kubenswrapper[4774]: I1121 16:55:56.319112 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7qhw\" (UniqueName: \"kubernetes.io/projected/0dafc0a9-dc6a-4a84-9191-9d914a319538-kube-api-access-d7qhw\") pod \"0dafc0a9-dc6a-4a84-9191-9d914a319538\" (UID: \"0dafc0a9-dc6a-4a84-9191-9d914a319538\") " Nov 21 16:55:56 crc kubenswrapper[4774]: I1121 16:55:56.319206 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0dafc0a9-dc6a-4a84-9191-9d914a319538-must-gather-output\") pod \"0dafc0a9-dc6a-4a84-9191-9d914a319538\" (UID: \"0dafc0a9-dc6a-4a84-9191-9d914a319538\") " Nov 21 16:55:56 crc kubenswrapper[4774]: I1121 16:55:56.324343 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dafc0a9-dc6a-4a84-9191-9d914a319538-kube-api-access-d7qhw" (OuterVolumeSpecName: "kube-api-access-d7qhw") pod "0dafc0a9-dc6a-4a84-9191-9d914a319538" (UID: "0dafc0a9-dc6a-4a84-9191-9d914a319538"). InnerVolumeSpecName "kube-api-access-d7qhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:55:56 crc kubenswrapper[4774]: I1121 16:55:56.422433 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7qhw\" (UniqueName: \"kubernetes.io/projected/0dafc0a9-dc6a-4a84-9191-9d914a319538-kube-api-access-d7qhw\") on node \"crc\" DevicePath \"\"" Nov 21 16:55:56 crc kubenswrapper[4774]: I1121 16:55:56.518138 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dafc0a9-dc6a-4a84-9191-9d914a319538-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "0dafc0a9-dc6a-4a84-9191-9d914a319538" (UID: "0dafc0a9-dc6a-4a84-9191-9d914a319538"). InnerVolumeSpecName "must-gather-output". 
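The copy container's exitCode=143 above is the conventional 128+signal encoding: 143 - 128 = 15, i.e. SIGTERM, which is consistent with the grace-period kill logged just before it. A small decoder illustrating the convention (not kubelet code):

    package exitcode

    import (
    	"fmt"
    	"syscall"
    )

    // Decode interprets a container exit code: values above 128
    // conventionally mean the process died from signal (code - 128).
    func Decode(code int) string {
    	if code > 128 {
    		sig := syscall.Signal(code - 128)
    		return fmt.Sprintf("killed by signal %d (%s)", int(sig), sig)
    	}
    	return fmt.Sprintf("exited normally with status %d", code)
    }

    // Decode(143) -> "killed by signal 15 (terminated)"
    // Decode(0)   -> "exited normally with status 0"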
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:55:56 crc kubenswrapper[4774]: I1121 16:55:56.524274 4774 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/0dafc0a9-dc6a-4a84-9191-9d914a319538-must-gather-output\") on node \"crc\" DevicePath \"\"" Nov 21 16:55:57 crc kubenswrapper[4774]: I1121 16:55:57.252942 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-v8pjc/must-gather-9mv46" Nov 21 16:55:58 crc kubenswrapper[4774]: I1121 16:55:58.106307 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dafc0a9-dc6a-4a84-9191-9d914a319538" path="/var/lib/kubelet/pods/0dafc0a9-dc6a-4a84-9191-9d914a319538/volumes" Nov 21 16:55:59 crc kubenswrapper[4774]: I1121 16:55:59.097246 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:55:59 crc kubenswrapper[4774]: E1121 16:55:59.098213 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:56:11 crc kubenswrapper[4774]: I1121 16:56:11.093698 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:56:11 crc kubenswrapper[4774]: E1121 16:56:11.094425 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:56:26 crc kubenswrapper[4774]: I1121 16:56:26.093623 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:56:26 crc kubenswrapper[4774]: E1121 16:56:26.094421 4774 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-jtxgb_openshift-machine-config-operator(3eb06dc6-b3cb-44b8-ba08-69bfac3661bd)\"" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" Nov 21 16:56:38 crc kubenswrapper[4774]: I1121 16:56:38.093111 4774 scope.go:117] "RemoveContainer" containerID="6e331a640da5c84d0f7e245295dc3f59ddc934bbe9391f398ad3b5355ad8825b" Nov 21 16:56:38 crc kubenswrapper[4774]: I1121 16:56:38.659213 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" event={"ID":"3eb06dc6-b3cb-44b8-ba08-69bfac3661bd","Type":"ContainerStarted","Data":"85adf971c877b7e1ba80fed3cc31c416c50ce80d5dcdd85420033207a6c72011"} Nov 21 16:56:55 crc kubenswrapper[4774]: I1121 16:56:55.179066 4774 scope.go:117] "RemoveContainer" containerID="d8a22e0a61ee51f2a4c2d11a6e71683c951d09671a07a26d884aba66b3f7ee5c" Nov 21 16:56:55 crc kubenswrapper[4774]: I1121 16:56:55.274109 
4774 scope.go:117] "RemoveContainer" containerID="e8aa9a5064304335851e5babcfe8b41cd64b9c64c8a01b2c1275ba350632722a" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.656561 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vgzzx"] Nov 21 16:57:06 crc kubenswrapper[4774]: E1121 16:57:06.657654 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" containerName="extract-utilities" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.657673 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" containerName="extract-utilities" Nov 21 16:57:06 crc kubenswrapper[4774]: E1121 16:57:06.657698 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dafc0a9-dc6a-4a84-9191-9d914a319538" containerName="copy" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.657708 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dafc0a9-dc6a-4a84-9191-9d914a319538" containerName="copy" Nov 21 16:57:06 crc kubenswrapper[4774]: E1121 16:57:06.657737 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" containerName="registry-server" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.657745 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" containerName="registry-server" Nov 21 16:57:06 crc kubenswrapper[4774]: E1121 16:57:06.657771 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" containerName="extract-content" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.657779 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" containerName="extract-content" Nov 21 16:57:06 crc kubenswrapper[4774]: E1121 16:57:06.657790 4774 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dafc0a9-dc6a-4a84-9191-9d914a319538" containerName="gather" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.657797 4774 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dafc0a9-dc6a-4a84-9191-9d914a319538" containerName="gather" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.658070 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dafc0a9-dc6a-4a84-9191-9d914a319538" containerName="gather" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.658084 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dafc0a9-dc6a-4a84-9191-9d914a319538" containerName="copy" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.658106 4774 memory_manager.go:354] "RemoveStaleState removing state" podUID="83ec3432-5b0d-42f5-b4bf-7fde7f2a271d" containerName="registry-server" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.660146 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.670050 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vgzzx"] Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.710303 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eacdae7e-6ec2-470f-a71e-66bde888e689-catalog-content\") pod \"certified-operators-vgzzx\" (UID: \"eacdae7e-6ec2-470f-a71e-66bde888e689\") " pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.710418 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl88c\" (UniqueName: \"kubernetes.io/projected/eacdae7e-6ec2-470f-a71e-66bde888e689-kube-api-access-wl88c\") pod \"certified-operators-vgzzx\" (UID: \"eacdae7e-6ec2-470f-a71e-66bde888e689\") " pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.710494 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eacdae7e-6ec2-470f-a71e-66bde888e689-utilities\") pod \"certified-operators-vgzzx\" (UID: \"eacdae7e-6ec2-470f-a71e-66bde888e689\") " pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.812742 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl88c\" (UniqueName: \"kubernetes.io/projected/eacdae7e-6ec2-470f-a71e-66bde888e689-kube-api-access-wl88c\") pod \"certified-operators-vgzzx\" (UID: \"eacdae7e-6ec2-470f-a71e-66bde888e689\") " pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.812807 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eacdae7e-6ec2-470f-a71e-66bde888e689-utilities\") pod \"certified-operators-vgzzx\" (UID: \"eacdae7e-6ec2-470f-a71e-66bde888e689\") " pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.812978 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eacdae7e-6ec2-470f-a71e-66bde888e689-catalog-content\") pod \"certified-operators-vgzzx\" (UID: \"eacdae7e-6ec2-470f-a71e-66bde888e689\") " pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.813451 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eacdae7e-6ec2-470f-a71e-66bde888e689-utilities\") pod \"certified-operators-vgzzx\" (UID: \"eacdae7e-6ec2-470f-a71e-66bde888e689\") " pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.813463 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eacdae7e-6ec2-470f-a71e-66bde888e689-catalog-content\") pod \"certified-operators-vgzzx\" (UID: \"eacdae7e-6ec2-470f-a71e-66bde888e689\") " pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.832194 4774 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wl88c\" (UniqueName: \"kubernetes.io/projected/eacdae7e-6ec2-470f-a71e-66bde888e689-kube-api-access-wl88c\") pod \"certified-operators-vgzzx\" (UID: \"eacdae7e-6ec2-470f-a71e-66bde888e689\") " pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.854715 4774 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q6xlb"] Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.857321 4774 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.877726 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q6xlb"] Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.915423 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-catalog-content\") pod \"community-operators-q6xlb\" (UID: \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\") " pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.915474 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-utilities\") pod \"community-operators-q6xlb\" (UID: \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\") " pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:06 crc kubenswrapper[4774]: I1121 16:57:06.915712 4774 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gq679\" (UniqueName: \"kubernetes.io/projected/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-kube-api-access-gq679\") pod \"community-operators-q6xlb\" (UID: \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\") " pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.004985 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.018435 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gq679\" (UniqueName: \"kubernetes.io/projected/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-kube-api-access-gq679\") pod \"community-operators-q6xlb\" (UID: \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\") " pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.018549 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-catalog-content\") pod \"community-operators-q6xlb\" (UID: \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\") " pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.018608 4774 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-utilities\") pod \"community-operators-q6xlb\" (UID: \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\") " pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.019087 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-catalog-content\") pod \"community-operators-q6xlb\" (UID: \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\") " pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.019176 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-utilities\") pod \"community-operators-q6xlb\" (UID: \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\") " pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.049635 4774 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gq679\" (UniqueName: \"kubernetes.io/projected/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-kube-api-access-gq679\") pod \"community-operators-q6xlb\" (UID: \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\") " pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.225752 4774 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.545309 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vgzzx"] Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.731388 4774 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q6xlb"] Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.969991 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q6xlb" event={"ID":"7ab342f4-8607-4d0a-8aff-b45fd840d5e5","Type":"ContainerStarted","Data":"5aa80bd42e2a2d2d04df280585fddb33d74dacf50073898df6acdfdffbda6ed7"} Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.970043 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q6xlb" event={"ID":"7ab342f4-8607-4d0a-8aff-b45fd840d5e5","Type":"ContainerStarted","Data":"c84ebd39a10db8879b017bc8a0ee7dfa85fbd10e62b95a7813b5e480b746b2df"} Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.972058 4774 generic.go:334] "Generic (PLEG): container finished" podID="eacdae7e-6ec2-470f-a71e-66bde888e689" containerID="7454bdddcabe8d8bcc67d8ef6646d55d937a66b7e106a026c97b156f62a2e7f6" exitCode=0 Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.972103 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgzzx" event={"ID":"eacdae7e-6ec2-470f-a71e-66bde888e689","Type":"ContainerDied","Data":"7454bdddcabe8d8bcc67d8ef6646d55d937a66b7e106a026c97b156f62a2e7f6"} Nov 21 16:57:07 crc kubenswrapper[4774]: I1121 16:57:07.972129 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgzzx" event={"ID":"eacdae7e-6ec2-470f-a71e-66bde888e689","Type":"ContainerStarted","Data":"aa291d86e823abace8edbcc9aa62f8a929cddbb79de6d0a49eafc0795202dd55"} Nov 21 16:57:08 crc kubenswrapper[4774]: I1121 16:57:08.985165 4774 generic.go:334] "Generic (PLEG): container finished" podID="7ab342f4-8607-4d0a-8aff-b45fd840d5e5" containerID="5aa80bd42e2a2d2d04df280585fddb33d74dacf50073898df6acdfdffbda6ed7" exitCode=0 Nov 21 16:57:08 crc kubenswrapper[4774]: I1121 16:57:08.985266 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q6xlb" event={"ID":"7ab342f4-8607-4d0a-8aff-b45fd840d5e5","Type":"ContainerDied","Data":"5aa80bd42e2a2d2d04df280585fddb33d74dacf50073898df6acdfdffbda6ed7"} Nov 21 16:57:08 crc kubenswrapper[4774]: I1121 16:57:08.995334 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgzzx" event={"ID":"eacdae7e-6ec2-470f-a71e-66bde888e689","Type":"ContainerStarted","Data":"e77651fb7c2fd7609cc47a1758e06de979838bb5229d612d8e8f93728c427718"} Nov 21 16:57:10 crc kubenswrapper[4774]: I1121 16:57:10.008365 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q6xlb" event={"ID":"7ab342f4-8607-4d0a-8aff-b45fd840d5e5","Type":"ContainerStarted","Data":"a686b7dccdd596811dbb99e56b1e7d3890d1df765b100b7a6023943d8b7f0507"} Nov 21 16:57:11 crc kubenswrapper[4774]: I1121 16:57:11.026617 4774 generic.go:334] "Generic (PLEG): container finished" podID="eacdae7e-6ec2-470f-a71e-66bde888e689" containerID="e77651fb7c2fd7609cc47a1758e06de979838bb5229d612d8e8f93728c427718" exitCode=0 Nov 21 16:57:11 crc kubenswrapper[4774]: I1121 16:57:11.026719 4774 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgzzx" event={"ID":"eacdae7e-6ec2-470f-a71e-66bde888e689","Type":"ContainerDied","Data":"e77651fb7c2fd7609cc47a1758e06de979838bb5229d612d8e8f93728c427718"} Nov 21 16:57:12 crc kubenswrapper[4774]: I1121 16:57:12.038017 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgzzx" event={"ID":"eacdae7e-6ec2-470f-a71e-66bde888e689","Type":"ContainerStarted","Data":"193e3fa4900fa9a3e476ff764f3c02ec51b4b6452ef98742f8ff3dd18eb369ce"} Nov 21 16:57:12 crc kubenswrapper[4774]: I1121 16:57:12.040842 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q6xlb" event={"ID":"7ab342f4-8607-4d0a-8aff-b45fd840d5e5","Type":"ContainerDied","Data":"a686b7dccdd596811dbb99e56b1e7d3890d1df765b100b7a6023943d8b7f0507"} Nov 21 16:57:12 crc kubenswrapper[4774]: I1121 16:57:12.040815 4774 generic.go:334] "Generic (PLEG): container finished" podID="7ab342f4-8607-4d0a-8aff-b45fd840d5e5" containerID="a686b7dccdd596811dbb99e56b1e7d3890d1df765b100b7a6023943d8b7f0507" exitCode=0 Nov 21 16:57:12 crc kubenswrapper[4774]: I1121 16:57:12.071896 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vgzzx" podStartSLOduration=2.618900798 podStartE2EDuration="6.071863161s" podCreationTimestamp="2025-11-21 16:57:06 +0000 UTC" firstStartedPulling="2025-11-21 16:57:07.973388865 +0000 UTC m=+10418.625588124" lastFinishedPulling="2025-11-21 16:57:11.426351228 +0000 UTC m=+10422.078550487" observedRunningTime="2025-11-21 16:57:12.059477867 +0000 UTC m=+10422.711677126" watchObservedRunningTime="2025-11-21 16:57:12.071863161 +0000 UTC m=+10422.724062420" Nov 21 16:57:13 crc kubenswrapper[4774]: I1121 16:57:13.057441 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q6xlb" event={"ID":"7ab342f4-8607-4d0a-8aff-b45fd840d5e5","Type":"ContainerStarted","Data":"9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8"} Nov 21 16:57:13 crc kubenswrapper[4774]: I1121 16:57:13.082503 4774 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q6xlb" podStartSLOduration=3.63494348 podStartE2EDuration="7.082484399s" podCreationTimestamp="2025-11-21 16:57:06 +0000 UTC" firstStartedPulling="2025-11-21 16:57:08.986642239 +0000 UTC m=+10419.638841498" lastFinishedPulling="2025-11-21 16:57:12.434183158 +0000 UTC m=+10423.086382417" observedRunningTime="2025-11-21 16:57:13.074441549 +0000 UTC m=+10423.726640808" watchObservedRunningTime="2025-11-21 16:57:13.082484399 +0000 UTC m=+10423.734683658" Nov 21 16:57:17 crc kubenswrapper[4774]: I1121 16:57:17.006090 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:17 crc kubenswrapper[4774]: I1121 16:57:17.006651 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:17 crc kubenswrapper[4774]: I1121 16:57:17.072156 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:17 crc kubenswrapper[4774]: I1121 16:57:17.144616 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:17 crc kubenswrapper[4774]: I1121 16:57:17.226951 
4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:17 crc kubenswrapper[4774]: I1121 16:57:17.227033 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:17 crc kubenswrapper[4774]: I1121 16:57:17.277127 4774 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:18 crc kubenswrapper[4774]: I1121 16:57:18.156569 4774 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:19 crc kubenswrapper[4774]: I1121 16:57:19.846904 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vgzzx"] Nov 21 16:57:19 crc kubenswrapper[4774]: I1121 16:57:19.847418 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vgzzx" podUID="eacdae7e-6ec2-470f-a71e-66bde888e689" containerName="registry-server" containerID="cri-o://193e3fa4900fa9a3e476ff764f3c02ec51b4b6452ef98742f8ff3dd18eb369ce" gracePeriod=2 Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.132154 4774 generic.go:334] "Generic (PLEG): container finished" podID="eacdae7e-6ec2-470f-a71e-66bde888e689" containerID="193e3fa4900fa9a3e476ff764f3c02ec51b4b6452ef98742f8ff3dd18eb369ce" exitCode=0 Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.132248 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgzzx" event={"ID":"eacdae7e-6ec2-470f-a71e-66bde888e689","Type":"ContainerDied","Data":"193e3fa4900fa9a3e476ff764f3c02ec51b4b6452ef98742f8ff3dd18eb369ce"} Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.330338 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.449482 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wl88c\" (UniqueName: \"kubernetes.io/projected/eacdae7e-6ec2-470f-a71e-66bde888e689-kube-api-access-wl88c\") pod \"eacdae7e-6ec2-470f-a71e-66bde888e689\" (UID: \"eacdae7e-6ec2-470f-a71e-66bde888e689\") " Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.449764 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eacdae7e-6ec2-470f-a71e-66bde888e689-catalog-content\") pod \"eacdae7e-6ec2-470f-a71e-66bde888e689\" (UID: \"eacdae7e-6ec2-470f-a71e-66bde888e689\") " Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.449869 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eacdae7e-6ec2-470f-a71e-66bde888e689-utilities\") pod \"eacdae7e-6ec2-470f-a71e-66bde888e689\" (UID: \"eacdae7e-6ec2-470f-a71e-66bde888e689\") " Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.451346 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eacdae7e-6ec2-470f-a71e-66bde888e689-utilities" (OuterVolumeSpecName: "utilities") pod "eacdae7e-6ec2-470f-a71e-66bde888e689" (UID: "eacdae7e-6ec2-470f-a71e-66bde888e689"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.456934 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eacdae7e-6ec2-470f-a71e-66bde888e689-kube-api-access-wl88c" (OuterVolumeSpecName: "kube-api-access-wl88c") pod "eacdae7e-6ec2-470f-a71e-66bde888e689" (UID: "eacdae7e-6ec2-470f-a71e-66bde888e689"). InnerVolumeSpecName "kube-api-access-wl88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.496243 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eacdae7e-6ec2-470f-a71e-66bde888e689-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eacdae7e-6ec2-470f-a71e-66bde888e689" (UID: "eacdae7e-6ec2-470f-a71e-66bde888e689"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.552486 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eacdae7e-6ec2-470f-a71e-66bde888e689-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.552556 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eacdae7e-6ec2-470f-a71e-66bde888e689-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.552584 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wl88c\" (UniqueName: \"kubernetes.io/projected/eacdae7e-6ec2-470f-a71e-66bde888e689-kube-api-access-wl88c\") on node \"crc\" DevicePath \"\"" Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.643697 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q6xlb"] Nov 21 16:57:20 crc kubenswrapper[4774]: I1121 16:57:20.643970 4774 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-q6xlb" podUID="7ab342f4-8607-4d0a-8aff-b45fd840d5e5" containerName="registry-server" containerID="cri-o://9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8" gracePeriod=2 Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.130885 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.157929 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgzzx" event={"ID":"eacdae7e-6ec2-470f-a71e-66bde888e689","Type":"ContainerDied","Data":"aa291d86e823abace8edbcc9aa62f8a929cddbb79de6d0a49eafc0795202dd55"} Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.157991 4774 scope.go:117] "RemoveContainer" containerID="193e3fa4900fa9a3e476ff764f3c02ec51b4b6452ef98742f8ff3dd18eb369ce" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.158130 4774 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vgzzx" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.167275 4774 generic.go:334] "Generic (PLEG): container finished" podID="7ab342f4-8607-4d0a-8aff-b45fd840d5e5" containerID="9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8" exitCode=0 Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.167320 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q6xlb" event={"ID":"7ab342f4-8607-4d0a-8aff-b45fd840d5e5","Type":"ContainerDied","Data":"9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8"} Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.167355 4774 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q6xlb" event={"ID":"7ab342f4-8607-4d0a-8aff-b45fd840d5e5","Type":"ContainerDied","Data":"c84ebd39a10db8879b017bc8a0ee7dfa85fbd10e62b95a7813b5e480b746b2df"} Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.167425 4774 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q6xlb" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.194472 4774 scope.go:117] "RemoveContainer" containerID="e77651fb7c2fd7609cc47a1758e06de979838bb5229d612d8e8f93728c427718" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.202539 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vgzzx"] Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.211312 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vgzzx"] Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.218611 4774 scope.go:117] "RemoveContainer" containerID="7454bdddcabe8d8bcc67d8ef6646d55d937a66b7e106a026c97b156f62a2e7f6" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.265882 4774 scope.go:117] "RemoveContainer" containerID="9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.266965 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-catalog-content\") pod \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\" (UID: \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\") " Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.267149 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-utilities\") pod \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\" (UID: \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\") " Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.267369 4774 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gq679\" (UniqueName: \"kubernetes.io/projected/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-kube-api-access-gq679\") pod \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\" (UID: \"7ab342f4-8607-4d0a-8aff-b45fd840d5e5\") " Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.268045 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-utilities" (OuterVolumeSpecName: "utilities") pod "7ab342f4-8607-4d0a-8aff-b45fd840d5e5" (UID: "7ab342f4-8607-4d0a-8aff-b45fd840d5e5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.271741 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-kube-api-access-gq679" (OuterVolumeSpecName: "kube-api-access-gq679") pod "7ab342f4-8607-4d0a-8aff-b45fd840d5e5" (UID: "7ab342f4-8607-4d0a-8aff-b45fd840d5e5"). InnerVolumeSpecName "kube-api-access-gq679". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.315884 4774 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7ab342f4-8607-4d0a-8aff-b45fd840d5e5" (UID: "7ab342f4-8607-4d0a-8aff-b45fd840d5e5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.358298 4774 scope.go:117] "RemoveContainer" containerID="a686b7dccdd596811dbb99e56b1e7d3890d1df765b100b7a6023943d8b7f0507" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.371816 4774 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.371925 4774 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-utilities\") on node \"crc\" DevicePath \"\"" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.371946 4774 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gq679\" (UniqueName: \"kubernetes.io/projected/7ab342f4-8607-4d0a-8aff-b45fd840d5e5-kube-api-access-gq679\") on node \"crc\" DevicePath \"\"" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.386955 4774 scope.go:117] "RemoveContainer" containerID="5aa80bd42e2a2d2d04df280585fddb33d74dacf50073898df6acdfdffbda6ed7" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.405129 4774 scope.go:117] "RemoveContainer" containerID="9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8" Nov 21 16:57:21 crc kubenswrapper[4774]: E1121 16:57:21.405525 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8\": container with ID starting with 9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8 not found: ID does not exist" containerID="9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.405566 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8"} err="failed to get container status \"9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8\": rpc error: code = NotFound desc = could not find container \"9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8\": container with ID starting with 9a236bbb6f46e46ed116f2248d362d6e4e96fc2b3237bad684c29103c6cabde8 not found: ID does not exist" Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.405588 4774 scope.go:117] "RemoveContainer" containerID="a686b7dccdd596811dbb99e56b1e7d3890d1df765b100b7a6023943d8b7f0507" Nov 21 
Nov 21 16:57:21 crc kubenswrapper[4774]: E1121 16:57:21.406004 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a686b7dccdd596811dbb99e56b1e7d3890d1df765b100b7a6023943d8b7f0507\": container with ID starting with a686b7dccdd596811dbb99e56b1e7d3890d1df765b100b7a6023943d8b7f0507 not found: ID does not exist" containerID="a686b7dccdd596811dbb99e56b1e7d3890d1df765b100b7a6023943d8b7f0507"
Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.406059 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a686b7dccdd596811dbb99e56b1e7d3890d1df765b100b7a6023943d8b7f0507"} err="failed to get container status \"a686b7dccdd596811dbb99e56b1e7d3890d1df765b100b7a6023943d8b7f0507\": rpc error: code = NotFound desc = could not find container \"a686b7dccdd596811dbb99e56b1e7d3890d1df765b100b7a6023943d8b7f0507\": container with ID starting with a686b7dccdd596811dbb99e56b1e7d3890d1df765b100b7a6023943d8b7f0507 not found: ID does not exist"
Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.406081 4774 scope.go:117] "RemoveContainer" containerID="5aa80bd42e2a2d2d04df280585fddb33d74dacf50073898df6acdfdffbda6ed7"
Nov 21 16:57:21 crc kubenswrapper[4774]: E1121 16:57:21.406318 4774 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5aa80bd42e2a2d2d04df280585fddb33d74dacf50073898df6acdfdffbda6ed7\": container with ID starting with 5aa80bd42e2a2d2d04df280585fddb33d74dacf50073898df6acdfdffbda6ed7 not found: ID does not exist" containerID="5aa80bd42e2a2d2d04df280585fddb33d74dacf50073898df6acdfdffbda6ed7"
Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.406353 4774 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5aa80bd42e2a2d2d04df280585fddb33d74dacf50073898df6acdfdffbda6ed7"} err="failed to get container status \"5aa80bd42e2a2d2d04df280585fddb33d74dacf50073898df6acdfdffbda6ed7\": rpc error: code = NotFound desc = could not find container \"5aa80bd42e2a2d2d04df280585fddb33d74dacf50073898df6acdfdffbda6ed7\": container with ID starting with 5aa80bd42e2a2d2d04df280585fddb33d74dacf50073898df6acdfdffbda6ed7 not found: ID does not exist"
Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.504274 4774 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q6xlb"]
Nov 21 16:57:21 crc kubenswrapper[4774]: I1121 16:57:21.516617 4774 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-q6xlb"]
Nov 21 16:57:22 crc kubenswrapper[4774]: I1121 16:57:22.108808 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ab342f4-8607-4d0a-8aff-b45fd840d5e5" path="/var/lib/kubelet/pods/7ab342f4-8607-4d0a-8aff-b45fd840d5e5/volumes"
Nov 21 16:57:22 crc kubenswrapper[4774]: I1121 16:57:22.110510 4774 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eacdae7e-6ec2-470f-a71e-66bde888e689" path="/var/lib/kubelet/pods/eacdae7e-6ec2-470f-a71e-66bde888e689/volumes"
Nov 21 16:58:59 crc kubenswrapper[4774]: I1121 16:58:59.601372 4774 patch_prober.go:28] interesting pod/machine-config-daemon-jtxgb container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 21 16:58:59 crc kubenswrapper[4774]: I1121 16:58:59.601987 4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
4774 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-jtxgb" podUID="3eb06dc6-b3cb-44b8-ba08-69bfac3661bd" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515110115133024434 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015110115134017352 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015110070135016476 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015110070135015446 5ustar corecore